Encode JPEG file works, but still needs improvement.

Fancy code 2024-07-19 22:30:00 +08:00
parent 7093b61fe9
commit a4bd40a847
6 changed files with 138 additions and 48 deletions

View File

@@ -33,6 +33,54 @@ $ ffplay -f s16le -ar 8000 -ac 1 test.pcm
$ ./ffmpeg -i test.h264 -vframes 1 -vf "scale=640:480:force_original_aspect_ratio=decrease" -f image2 output.jpeg
```
+```code
+void FfmpegThumbnail::GetDecodeDataCallback(AVFrame *frame)
+{
+    LogInfo("GetDecodeDataCallback frame->width = %d, frame->height = %d\n", frame->width, frame->height);
+    // Allocate output frame for YUV420P format
+    AVFrame *output_frame = av_frame_alloc();
+    output_frame->format = AV_PIX_FMT_YUV420P;
+    output_frame->width = 640;
+    output_frame->height = 480;
+    // Calculate buffer size for YUV420P
+    int yuv_buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, 640, 480, 1);
+    uint8_t *yuv_buf = (uint8_t *)av_malloc(yuv_buf_size);
+    // Fill output frame with YUV420P buffer
+    av_image_fill_arrays(output_frame->data, output_frame->linesize, yuv_buf, AV_PIX_FMT_YUV420P, 640, 480, 1);
+    // Create SwsContext for pixel format conversion from YUV420P (1920x2160) to YUV420P (640x480)
+    SwsContext *sws_ctx = sws_getContext(frame->width, frame->height, static_cast<AVPixelFormat>(frame->format),
+                                         output_frame->width, output_frame->height, AV_PIX_FMT_YUV420P,
+                                         SWS_BILINEAR, nullptr, nullptr, nullptr);
+    if (!sws_ctx) {
+        LogError("Failed to create SwsContext for pixel format conversion\n");
+        av_frame_free(&output_frame);
+        av_free(yuv_buf);
+        return;
+    }
+    // Perform pixel format conversion and scaling
+    sws_scale(sws_ctx, frame->data, frame->linesize, 0, frame->height,
+              output_frame->data, output_frame->linesize);
+    // Clean up SwsContext
+    sws_freeContext(sws_ctx);
+    // Encode the YUV420P frame to JPEG using mEncoder
+    if (mEncoder) {
+        mEncoder->EncodeData(output_frame, mStream, mEncodeCallback);
+    }
+    // Free allocated resources
+    av_frame_free(&output_frame);
+    av_free(yuv_buf);
+}
+```
* Mux an h264 file and a wav file into an mp4 file
**Note: no way has been found yet to mux an h264 file and a g711a file into an mp4 file**
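The README snippet above covers the decode-and-scale half of the thumbnail path; the JPEG half is hidden behind `mEncoder->EncodeData()`. For reference only — this is not code from the commit, and `encode_frame_to_jpeg`, the fixed time base, and the compliance flag are illustrative assumptions targeting FFmpeg 5.x or newer — a minimal standalone sketch of that step with libavcodec's MJPEG encoder could look like this:

```cpp
// Hypothetical sketch, not part of this repository: encode one YUV420P
// AVFrame into a single JPEG image using the MJPEG encoder.
extern "C" {
#include <libavcodec/avcodec.h>
}
#include <cstdio>

static bool encode_frame_to_jpeg(AVFrame *frame, const char *path)
{
    const AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
    if (!codec) {
        return false;
    }
    AVCodecContext *ctx = avcodec_alloc_context3(codec);
    ctx->width = frame->width;
    ctx->height = frame->height;
    ctx->time_base = {1, 25};                                 // required by the encoder, value is arbitrary here
    ctx->pix_fmt = static_cast<AVPixelFormat>(frame->format); // e.g. AV_PIX_FMT_YUV420P
    ctx->strict_std_compliance = FF_COMPLIANCE_UNOFFICIAL;    // let MJPEG accept limited-range yuv420p
    bool ok = false;
    if (avcodec_open2(ctx, codec, nullptr) == 0 && avcodec_send_frame(ctx, frame) == 0) {
        AVPacket *pkt = av_packet_alloc();
        if (avcodec_receive_packet(ctx, pkt) == 0) {
            FILE *file = fopen(path, "wb"); // one MJPEG packet is one complete JPEG
            if (file) {
                fwrite(pkt->data, 1, pkt->size, file);
                fclose(file);
                ok = true;
            }
        }
        av_packet_free(&pkt);
    }
    avcodec_free_context(&ctx);
    return ok;
}
```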

View File

@@ -78,6 +78,11 @@ bool FfmpegDecoder::Init(void)
            return false;
        }
    }
+    else {
+        mCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+        mCodecCtx->width = 1920;
+        mCodecCtx->height = 2610;
+    }
    if ((ret = avcodec_open2(mCodecCtx, mCodec, nullptr)) < 0) {
        char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
        LogError("Could not open codec:%s\n", av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
@@ -107,18 +112,22 @@ bool FfmpegDecoder::Init(void)
            return false;
        }
    }
+    LogInfo("init success pix_fmt = %d\n", mCodecCtx->pix_fmt);
    return true;
}
bool FfmpegDecoder::UnInit(void)
{
+    LogInfo("uninit %s\n", avcodec_get_name(mCodecId));
    if (mFrame) {
        av_frame_free(&mFrame);
        mFrame = nullptr;
    }
    if (mCodecCtx) {
+        if (mCodecId != AV_CODEC_ID_H264) {
            avcodec_free_context(&mCodecCtx);
            mCodecCtx = nullptr;
        }
+    }
    av_packet_free(&mPacket);
    mPacket = nullptr;
    if (mParser) {
@@ -174,11 +183,11 @@ void inline FfmpegDecoder::AVParseData(const void *data, const size_t &size,
}
// static void save_code_stream_file(const void *data, const size_t &size)
// {
-//     char OutPath[16];
+//     char OutPath[128] = {0};
//     const void *pData = data;
//     FILE *file = NULL;
+//     LogInfo("save_code_stream_file size = %d\n", size);
-//     sprintf(OutPath, "./test.pcm");
+//     sprintf(OutPath, "./test.yuv");
//     file = fopen(OutPath, "a+");
//     if (file) { // TODO: Don't open very time.
@@ -189,6 +198,17 @@ void inline FfmpegDecoder::AVParseData(const void *data, const size_t &size,
//     if (file)
//         fclose(file);
// }
+// static void pgm_save(unsigned char *buf, int wrap, int xsize, int ysize, char *filename)
+// {
+//     FILE *f;
+//     int i;
+//     f = fopen(filename, "wb");
+//     fprintf(f, "P5\n%d %d\n%d\n", xsize, ysize, 255);
+//     for (i = 0; i < ysize; i++)
+//         fwrite(buf + i * wrap, 1, xsize, f);
+//     fclose(f);
+// }
void inline FfmpegDecoder::AVDecodeData(AVPacket *pkt, std::function<void(AVFrame *frame)> callback)
{
    int ret = avcodec_send_packet(mCodecCtx, pkt);
@@ -213,7 +233,10 @@ void inline FfmpegDecoder::AVDecodeData(AVPacket *pkt, std::function<void(AVFram
        // for (ch = 0; ch < mCodecCtx->ch_layout.nb_channels; ch++)
        //     // fwrite(frame->data[ch] + data_size * i, 1, data_size, outfile);
        //     save_code_stream_file(mFrame->data[ch] + data_size * i, data_size);
-        // save_code_stream_file(mFrame->data[0], mFrame->linesize[0]);
+        // if (mCodecId == AV_CODEC_ID_H264) {
+        //     // save_code_stream_file(mFrame->data[0], mFrame->linesize[0]);
+        //     pgm_save(mFrame->data[0], mFrame->linesize[0], mFrame->width, mFrame->height, "./test.yuv");
+        // }
        // LogInfo("decode frame pts = %llu, nb_samples = %d\n", mFrame->pts, mFrame->nb_samples);
        callback(mFrame);
    }
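The commented-out pgm_save() helper added above only dumps the luma plane (as a binary P5 PGM, despite the ./test.yuv file name). When the whole picture needs checking, a linesize-aware dump of all three planes is handy. The sketch below is hypothetical — dump_yuv420p is not part of this commit — and assumes the frame really is yuv420p; the resulting file can be played back with `ffplay -f rawvideo -pixel_format yuv420p -video_size WxH test.yuv`.

```cpp
// Hypothetical helper, not from this commit: append a full yuv420p AVFrame
// to a raw .yuv file, honouring the per-plane linesize padding.
extern "C" {
#include <libavutil/frame.h>
}
#include <cstdio>

static void dump_yuv420p(const AVFrame *frame, const char *path)
{
    FILE *file = fopen(path, "ab");
    if (!file) {
        return;
    }
    // Y plane at full resolution, U and V planes at half width and half height.
    for (int y = 0; y < frame->height; y++)
        fwrite(frame->data[0] + y * frame->linesize[0], 1, frame->width, file);
    for (int y = 0; y < frame->height / 2; y++)
        fwrite(frame->data[1] + y * frame->linesize[1], 1, frame->width / 2, file);
    for (int y = 0; y < frame->height / 2; y++)
        fwrite(frame->data[2] + y * frame->linesize[2], 1, frame->width / 2, file);
    fclose(file);
}
```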

View File

@@ -92,10 +92,10 @@ bool FfmpegEncoder::Init(int &outputFlags)
    case AVMEDIA_TYPE_VIDEO:
        mCodecCtx->codec_id = mCodecId;
-        mCodecCtx->bit_rate = 400000;
+        mCodecCtx->bit_rate = 300000;
        /* Resolution must be a multiple of two. */
-        mCodecCtx->width = 1920;
-        mCodecCtx->height = 2160;
+        mCodecCtx->width = 640;
+        mCodecCtx->height = 480;
        /* timebase: This is the fundamental unit of time (in seconds) in terms
         * of which frame timestamps are represented. For fixed-fps content,
         * timebase should be 1/framerate and timestamp increments should be
@@ -103,7 +103,7 @@ bool FfmpegEncoder::Init(int &outputFlags)
        mCodecCtx->time_base = (AVRational){1, STREAM_FRAME_RATE};
        mCodecCtx->gop_size = 12; /* emit one intra frame every twelve frames at most */
-        mCodecCtx->pix_fmt = STREAM_PIX_FMT;
+        mCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
        if (mCodecCtx->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
            /* just for testing, we also add B-frames */
            mCodecCtx->max_b_frames = 2;
@@ -170,23 +170,23 @@ bool FfmpegEncoder::OpenEncoder(AVDictionary *optArg, AVStream *stream)
        return false;
    }
}
-// static void save_code_stream_file(const void *data, const size_t &size)
-// {
-//     char OutPath[16];
-//     const void *pData = data;
-//     FILE *file = NULL;
-//     LogInfo("save_code_stream_file: %d\n", size);
-//     sprintf(OutPath, "./test.aac");
-//     file = fopen(OutPath, "a+");
-//     if (file) { // TODO: Don't open very time.
-//         fwrite(pData, 1, size, file);
-//         fflush(file);
-//     }
-//     if (file)
-//         fclose(file);
-// }
+static void save_code_stream_file(const void *data, const size_t &size)
+{
+    char OutPath[16];
+    const void *pData = data;
+    FILE *file = NULL;
+    LogInfo("save_code_stream_file: %d\n", size);
+    sprintf(OutPath, "./test.jpg");
+    file = fopen(OutPath, "a+");
+    if (file) { // TODO: Don't open very time.
+        fwrite(pData, 1, size, file);
+        fflush(file);
+    }
+    if (file)
+        fclose(file);
+}
int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function<void(AVPacket *pkt)> callback)
{
    int ret = 0;
@@ -228,7 +228,9 @@ int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function<vo
        // LogInfo("aaaaaaaaaaaaaaaaaaaaaaa Write frame mTmpPkt->pts: %llu\n", mTmpPkt->pts);
        if (callback) {
-            // save_code_stream_file(mTmpPkt->data, mTmpPkt->size);
+            if (mCodecId == AV_CODEC_ID_MJPEG) {
+                save_code_stream_file(mTmpPkt->data, mTmpPkt->size);
+            }
            callback(mTmpPkt);
        }
    }
@@ -269,7 +271,7 @@ bool FfmpegEncoder::OpenVideo(AVDictionary *optArg, AVStream *stream)
        LogError("Could not copy the stream parameters\n");
        return false;
    }
-    LogInfo("Open video success\n");
+    LogInfo(" Open video success, mCodecCtx->pix_fmt = %d\n", mCodecCtx->pix_fmt);
    return true;
}
bool FfmpegEncoder::OpenAudio(AVDictionary *optArg, AVStream *stream)
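Note that save_code_stream_file() above opens ./test.jpg with "a+", so every MJPEG packet is appended to the same file; since each MJPEG packet is already a complete JPEG image, most viewers will only show the first one. A hypothetical alternative — save_jpeg_packet is not part of this commit — writes one numbered file per packet:

```cpp
// Hypothetical helper, not from this commit: one file per JPEG packet.
#include <cstddef>
#include <cstdio>

static void save_jpeg_packet(const void *data, size_t size)
{
    static int index = 0; // simple counter for unique file names
    char path[64];
    snprintf(path, sizeof(path), "./thumb_%03d.jpg", index++);
    FILE *file = fopen(path, "wb"); // binary mode, overwrite
    if (file) {
        fwrite(data, 1, size, file);
        fclose(file);
    }
}
```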

View File

@@ -38,7 +38,8 @@ extern "C" {
#include <string.h>
#include <thread>
FfmpegOutputStream::FfmpegOutputStream(const AVCodecID &encodecId, const AVCodecID &dncodecId)
-    : mEncodecId(encodecId), mDeccodecId(dncodecId), mTmpPkt(nullptr), mStream(nullptr), mStreamHeaderWritten(false)
+    : mEncodecId(encodecId), mDeccodecId(dncodecId), mTmpPkt(nullptr), mStream(nullptr), mStreamHeaderWritten(false),
+      mH264Data2Jpeg(nullptr)
{
}
bool FfmpegOutputStream::Init(AVFormatContext *outputFormat) bool FfmpegOutputStream::Init(AVFormatContext *outputFormat)
@@ -97,6 +98,10 @@ void FfmpegOutputStream::UnInit(void)
    if (mThumbnailThread.joinable()) {
        mThumbnailThread.join();
    }
+    if (mH264Data2Jpeg) {
+        free(mH264Data2Jpeg);
+        mH264Data2Jpeg = nullptr;
+    }
}
void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts) void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts)
{ {
@@ -147,9 +152,9 @@ bool FfmpegOutputStream::CheckStreamHeader(const void *data, const size_t &size)
        return false;
    }
    LogInfo("Found extradata\n");
-    static char *h264data = (char *)malloc(size + 1);
-    memcpy(h264data, data, size);
-    FfmpegOutputStream::CreateThumbnailFile(h264data, size);
+    mH264Data2Jpeg = (char *)malloc(size + 1);
+    memcpy(mH264Data2Jpeg, data, size);
+    FfmpegOutputStream::CreateThumbnailFile(mH264Data2Jpeg, size);
    memcpy(extradata, pData, i);
    mStream->codecpar->extradata = extradata;
    mStream->codecpar->extradata_size = i;
@@ -168,9 +173,10 @@ void FfmpegOutputStream::GetDecodeDataCallback(AVFrame *frame)
}
void FfmpegOutputStream::CreateThumbnailFile(const void *frame, const size_t &size)
{
-    auto thumbnailThread = [=](std::shared_ptr<FfmpegOutputStream> impl, const void *frame, const size_t size) {
+    auto thumbnailThread =
+        [](std::shared_ptr<FfmpegOutputStream> output, const void *frameData, const size_t dataSize) {
            LogInfo("CreateThumbnailFile start.\n");
-        impl->CreateThumbnailFileThread(frame, size);
+            output->CreateThumbnailFileThread(frameData, dataSize);
        };
    std::shared_ptr<FfmpegOutputStream> impl = shared_from_this();
    mThumbnailThread = std::thread(thumbnailThread, impl, frame, size);
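The raw malloc'd copy in mH264Data2Jpeg works here because UnInit() joins mThumbnailThread before calling free(). A hypothetical alternative that avoids the manual ownership entirely is to hand the worker thread its own std::vector copy; the function name StartThumbnailWorker and its function-pointer parameter below are illustrative only, not part of this repository.

```cpp
// Hypothetical sketch, not from this commit: the thread owns a std::vector
// copy of the buffer, so nothing has to be freed by hand in UnInit().
#include <cstddef>
#include <thread>
#include <vector>

std::thread StartThumbnailWorker(const void *data, size_t size, void (*work)(const void *, size_t))
{
    std::vector<char> copy(static_cast<const char *>(data), static_cast<const char *>(data) + size);
    // The lambda takes ownership of `copy`; it is destroyed when the thread finishes.
    return std::thread([buffer = std::move(copy), work]() { work(buffer.data(), buffer.size()); });
}
```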

View File

@@ -69,5 +69,6 @@ private:
    std::function<void(AVPacket *)> mEncodeCallback;
    bool mStreamHeaderWritten;
    std::thread mThumbnailThread;
+    char *mH264Data2Jpeg;
};
#endif

View File

@@ -45,21 +45,19 @@ void FfmpegThumbnail::Init(void)
    LogInfo("FfmpegThumbnail Init\n");
    mDecodeCallback = std::bind(&FfmpegThumbnail::GetDecodeDataCallback, this, std::placeholders::_1);
    mEncodeCallback = std::bind(&FfmpegThumbnail::GetEncodeDataCallback, this, std::placeholders::_1);
-    sws_ctx =
-        sws_getContext(1920, 2160, AV_PIX_FMT_YUV420P, 1920, 2160, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);
}
void FfmpegThumbnail::UnInit(void)
{
-    if (mDecoder) {
-        mDecoder->UnInit();
-        mDecoder.reset();
+    if (mOutputFormat && mOutputFormat->pb) {
+        av_write_trailer(mOutputFormat);
    }
    if (mEncoder) {
        mEncoder->UnInit();
        mEncoder.reset();
    }
-    if (mOutputFormat && mOutputFormat->pb) {
-        av_write_trailer(mOutputFormat);
+    if (mDecoder) {
+        mDecoder->UnInit();
+        mDecoder.reset();
    }
    if (nullptr == mOutputFormat) {
        return;
@@ -140,23 +138,34 @@ bool FfmpegThumbnail::CreateThumbnail(const std::string &outputFile, const void
    mEncoder->OpenEncoder(nullptr, mStream);
    LogInfo("Start to decode data\n");
    mDecoder->DecodeData(data, size, AV_NOPTS_VALUE, mDecodeCallback);
+    LogInfo("Decode data end\n");
    return false;
}
void FfmpegThumbnail::GetDecodeDataCallback(AVFrame *frame)
{
-    LogInfo("GetDecodeDataCallback\n");
+    LogInfo("GetDecodeDataCallback frame->width = %d, frame->height=%d\n", frame->width, frame->height);
    AVFrame *output_frame = av_frame_alloc();
-    int jpeg_buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, frame->width, frame->height, 1);
+    output_frame->format = AV_PIX_FMT_YUV420P;
+    output_frame->width = 640;
+    output_frame->height = 480;
+    int jpeg_buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, 640, 480, 1);
    LogInfo("jpeg_buf_size: %d\n", jpeg_buf_size);
    uint8_t *jpeg_buf = (uint8_t *)av_malloc(jpeg_buf_size);
    av_image_fill_arrays(
        output_frame->data, output_frame->linesize, jpeg_buf, AV_PIX_FMT_YUV420P, frame->width, frame->height, 1);
+    sws_ctx = sws_getContext(1920,
+                             2160,
+                             static_cast<AVPixelFormat>(frame->format),
+                             output_frame->width,
+                             output_frame->height,
+                             AV_PIX_FMT_YUV420P,
+                             SWS_BILINEAR,
+                             NULL,
+                             NULL,
+                             NULL);
    // Perform pixel format conversion
-    sws_scale(sws_ctx, frame->data, frame->linesize, 0, 2160, output_frame->data, output_frame->linesize);
-    output_frame->format = AV_PIX_FMT_YUV420P;
-    output_frame->width = 1920;
-    output_frame->height = 2160;
+    sws_scale(sws_ctx, frame->data, frame->linesize, 0, frame->height, output_frame->data, output_frame->linesize);
    if (mEncoder) {
        mEncoder->EncodeData(output_frame, mStream, mEncodeCallback);
@@ -165,6 +174,7 @@ void FfmpegThumbnail::GetDecodeDataCallback(AVFrame *frame)
}
void FfmpegThumbnail::GetEncodeDataCallback(AVPacket *pkt)
{
+    return;
    LogInfo("ggggggggggggggggggggggggggggggggggggggg GetEncodeDataCallback %d\n", pkt->size);
    int ret = 0;
    ret = av_interleaved_write_frame(mOutputFormat, pkt);
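One of the rough edges the commit message alludes to is visible in the hunk above: GetDecodeDataCallback() now calls sws_getContext() for every decoded frame and the context is never released. A hedged sketch of the usual fix — the free-standing function and the by-reference parameter are illustrative, not the repo's API — is to reuse the context via sws_getCachedContext() and free it once during teardown:

```cpp
// Hypothetical sketch, not from this commit: reuse one scaler context across
// frames and release it exactly once.
extern "C" {
#include <libavutil/frame.h>
#include <libswscale/swscale.h>
}

void ScaleFrameSketch(SwsContext *&sws_ctx, const AVFrame *src, AVFrame *dst)
{
    // Returns the existing context when the parameters are unchanged,
    // otherwise frees it and allocates a new one.
    sws_ctx = sws_getCachedContext(sws_ctx, src->width, src->height,
                                   static_cast<AVPixelFormat>(src->format),
                                   dst->width, dst->height, AV_PIX_FMT_YUV420P,
                                   SWS_BILINEAR, nullptr, nullptr, nullptr);
    if (!sws_ctx) {
        return;
    }
    sws_scale(sws_ctx, src->data, src->linesize, 0, src->height, dst->data, dst->linesize);
}
// Later, e.g. in UnInit():  sws_freeContext(sws_ctx);  sws_ctx = nullptr;
```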