Backup: thumbnail test.

Fancy code 2024-07-19 10:38:29 +08:00
parent 3c3b934bd1
commit 7093b61fe9
15 changed files with 331 additions and 11 deletions

View File

@ -44,7 +44,10 @@ void LedsHandle::ControlDeviceStatusLed(const DeviceStatus &status, const long i
}
void inline LedsHandle::DeleteDeviceStatusLed(void)
{
mDeviceStatus->DeleteState();
if (mDeviceStatus) {
mDeviceStatus->DeleteState();
mDeviceStatus.reset();
}
}
void LedsHandle::DeleteAllLeds(void)
{

View File

@ -60,6 +60,22 @@ std::string MediaTask::GetTargetNameForSaving(void)
LogInfo("GetTargetNameForSaving: %s\n", pathStream.str().c_str());
return pathStream.str();
}
std::string MediaTask::GetThumbnailNameForSaving(const std::string &targetName)
{
std::string thumbnailName = targetName;
size_t dot_pos = thumbnailName.find_last_of('.');
if (dot_pos != std::string::npos) {
std::string extension = thumbnailName.substr(dot_pos);
if (extension == ".mp4") {
thumbnailName.replace(dot_pos, extension.length(), ".jpeg");
LogInfo("GetThumbnailNameForSaving: %s\n", thumbnailName.c_str());
return thumbnailName;
}
}
LogError("TargetName is not an mp4 file.\n");
std::string unknownFile = "unknown";
return unknownFile;
}
void MediaTask::Response(const std::vector<MediaTaskResponse> &response)
{
LogInfo("Response handle.\n");

View File

@ -17,6 +17,7 @@
#include "DataProcessing.h"
#include "IMediaManager.h"
#include <string>
#include <vector>
constexpr unsigned int MEDIA_TASK_TIMEOUT_MS = 1000 * 60;
typedef struct media_task_info
{
@ -40,6 +41,7 @@ public:
virtual ~MediaTask() = default;
virtual unsigned int GetTaskTimeOutMs(void);
std::string GetTargetNameForSaving(void) override;
std::string GetThumbnailNameForSaving(const std::string &targetName) override;
void Response(const std::vector<MediaTaskResponse> &response) override;
private:
@ -52,5 +54,6 @@ private:
std::shared_ptr<MediaTaskResponse> mResponseData;
std::string mTargetName;
long long mCreateTime_s;
std::vector<std::string> mTargetNameList;
};
#endif

View File

@ -27,6 +27,12 @@ $ ffmpeg -f mulaw -ar 8000 -i audio.g711a audio.wav
$ ffplay -f s16le -ar 8000 -ac 1 test.pcm
```
* Generate a jpeg
```code
$ ./ffmpeg -i test.h264 -vframes 1 -vf "scale=640:480:force_original_aspect_ratio=decrease" -f image2 output.jpeg
```
* Combine an h264 file and a wav file into an mp4 file (a sketch command is shown below)
**Note: no way was found to combine h264 and g711a files directly into an mp4 file**
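A possible muxing command (a sketch only; the file names test.h264, audio.wav, and output.mp4 are placeholders, and the g711a audio must first be converted to wav as shown above):
```code
$ ./ffmpeg -i test.h264 -i audio.wav -c:v copy -c:a aac -shortest output.mp4
```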

View File

@ -119,7 +119,7 @@ typedef struct camera_task_param
{
camera_task_param(const CameraTaskType &cameraTask);
const CameraTaskType mCameraTask;
unsigned int mVideoRecordingTimeMs;
unsigned int mVideoRecordingTimeMs; // TODO: delete?
std::shared_ptr<VCameraTaskContext> mCtx;
} CameraTaskParam;
using AudioStreamCallback = std::function<void(const void *, const unsigned int &, const unsigned long long &)>;

View File

@ -58,6 +58,7 @@ public:
virtual ~VMediaTask() = default;
virtual const MediaTaskType GetTaskType(void);
virtual std::string GetTargetNameForSaving(void);
virtual std::string GetThumbnailNameForSaving(const std::string &targetName);
virtual void Response(const std::vector<MediaTaskResponse> &response);
virtual bool IsTaskFinished(void);
virtual const signed int GetIsNight(void);

View File

@ -35,6 +35,11 @@ std::string VMediaTask::GetTargetNameForSaving(void)
const std::string fileName = "Undefined";
return fileName;
}
std::string VMediaTask::GetThumbnailNameForSaving(const std::string &targetName)
{
const std::string fileName = "Undefined";
return fileName;
}
void VMediaTask::Response(const std::vector<MediaTaskResponse> &response)
{
}

View File

@ -38,6 +38,7 @@ StatusCode RecordMp4::Init(void)
return CreateStatusCode(STATUS_CODE_NOT_OK);
}
std::string videoPath = mRecordTask->GetTargetNameForSaving();
std::string thumbnailPath = mRecordTask->GetThumbnailNameForSaving(videoPath);
OutputFileInfo fileInfo = {.mDuration_ms = 5000, .mFinished = &mIsRecordingFinished};
if (OUTPUT_FILE_NAME_MAX >= videoPath.size()) {
memcpy(fileInfo.mFileName, videoPath.c_str(), videoPath.size());
@ -46,6 +47,13 @@ StatusCode RecordMp4::Init(void)
LogError("VideoPath is too long.\n");
return CreateStatusCode(STATUS_CODE_NOT_OK);
}
if (OUTPUT_FILE_NAME_MAX >= thumbnailPath.size()) {
memcpy(fileInfo.mThumbnailFileName, thumbnailPath.c_str(), thumbnailPath.size());
}
else {
LogError("ThumbnailPath is too long.\n");
return CreateStatusCode(STATUS_CODE_NOT_OK);
}
StatusCode code = IOpenOutputFile(mRecordMp4Object, &fileInfo);
if (!IsCodeOK(code)) {
LogError("OpenOutputFile failed.\n");

View File

@ -51,9 +51,10 @@ enum OutputFileStatus
#define OUTPUT_FILE_NAME_MAX 256
typedef struct output_file_info
{
char mFileName[OUTPUT_FILE_NAME_MAX]; ///< Name of the output file. Must be an absolute path.
const unsigned int mDuration_ms; ///< Duration of the output file in milliseconds.
int *const mFinished; ///< Whether the output file is finished. See OutputFileStatus.
char mFileName[OUTPUT_FILE_NAME_MAX]; ///< Name of the output file. Must be an absolute path.
char mThumbnailFileName[OUTPUT_FILE_NAME_MAX]; ///< Name of the thumbnail file. Must be an absolute path.
const unsigned int mDuration_ms; ///< Duration of the output file in milliseconds.
int *const mFinished; ///< Whether the output file is finished. See OutputFileStatus.
} OutputFileInfo;
typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, const unsigned long long, void *);
typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, const unsigned long long, void *);

View File

@ -93,11 +93,11 @@ bool FfmpegDecoder::Init(void)
LogError("Could not allocate video frame\n");
return false;
}
mParser = av_parser_init(mCodec->id);
if (!mParser) {
LogError("mParser not found : %s\n", avcodec_get_name(mCodec->id));
return false;
}
// mParser = av_parser_init(mCodec->id);
// if (!mParser) {
// LogError("mParser not found : %s\n", avcodec_get_name(mCodec->id));
// return false;
// }
if (AVMEDIA_TYPE_AUDIO == mCodec->type) {
// mFrame->nb_samples = mCodecCtx->frame_size;
// mFrame->format = mCodecCtx->sample_fmt;

View File

@ -240,6 +240,8 @@ bool FfmpegEncoder::OpenVideo(AVDictionary *optArg, AVStream *stream)
int ret = 0;
AVDictionary *opt = nullptr;
av_dict_copy(&opt, optArg, 0);
// av_dict_set(&opt, "strict_std_compliance", "experimental", 0);
av_opt_set(mCodecCtx, "strict", "unofficial", 0); // Add for jpeg
/* open the codec */
ret = avcodec_open2(mCodecCtx, mCodec, &opt);
av_dict_free(&opt);
@ -267,6 +269,7 @@ bool FfmpegEncoder::OpenVideo(AVDictionary *optArg, AVStream *stream)
LogError("Could not copy the stream parameters\n");
return false;
}
LogInfo("Open video success\n");
return true;
}
bool FfmpegEncoder::OpenAudio(AVDictionary *optArg, AVStream *stream)

View File

@ -15,6 +15,7 @@
#include "FfmpegOutputStream.h"
#include "FfmpegDecoder.h"
#include "FfmpegEncoder.h"
#include "FfmpegThumbnail.h"
#include "ILog.h"
#ifdef __cplusplus
extern "C" {
@ -35,6 +36,7 @@ extern "C" {
#include <functional>
#include <memory>
#include <string.h>
#include <thread>
FfmpegOutputStream::FfmpegOutputStream(const AVCodecID &encodecId, const AVCodecID &dncodecId)
: mEncodecId(encodecId), mDeccodecId(dncodecId), mTmpPkt(nullptr), mStream(nullptr), mStreamHeaderWritten(false)
{
@ -92,6 +94,9 @@ void FfmpegOutputStream::UnInit(void)
mDecoder.reset();
}
av_packet_free(&mTmpPkt);
if (mThumbnailThread.joinable()) {
mThumbnailThread.join();
}
}
void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts)
{
@ -142,6 +147,9 @@ bool FfmpegOutputStream::CheckStreamHeader(const void *data, const size_t &size)
return false;
}
LogInfo("Found extradata\n");
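// Copy the header/key-frame data so the asynchronous thumbnail thread does not depend on the caller's buffer (the copy is never freed in this test code).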
static char *h264data = (char *)malloc(size + 1);
memcpy(h264data, data, size);
FfmpegOutputStream::CreateThumbnailFile(h264data, size);
memcpy(extradata, pData, i);
mStream->codecpar->extradata = extradata;
mStream->codecpar->extradata_size = i;
@ -157,4 +165,21 @@ void FfmpegOutputStream::GetDecodeDataCallback(AVFrame *frame)
mEncoder->EncodeData(frame, mStream, mEncodeCallback);
return;
}
}
void FfmpegOutputStream::CreateThumbnailFile(const void *frame, const size_t &size)
{
auto thumbnailThread = [=](std::shared_ptr<FfmpegOutputStream> impl, const void *frame, const size_t size) {
LogInfo("CreateThumbnailFile start.\n");
impl->CreateThumbnailFileThread(frame, size);
};
std::shared_ptr<FfmpegOutputStream> impl = shared_from_this();
mThumbnailThread = std::thread(thumbnailThread, impl, frame, size);
}
void FfmpegOutputStream::CreateThumbnailFileThread(const void *frame, const size_t &size)
{
FfmpegThumbnail thumbnail(AV_CODEC_ID_MJPEG, AV_CODEC_ID_H264);
thumbnail.Init();
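// "./sssss.jpeg" is a hard-coded placeholder output path for this test.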
thumbnail.CreateThumbnail("./sssss.jpeg", frame, size);
thumbnail.UnInit();
LogInfo("CreateThumbnailFile end.\n");
}

View File

@ -36,7 +36,8 @@ extern "C" {
#endif
#include <functional>
#include <memory>
class FfmpegOutputStream
#include <thread>
class FfmpegOutputStream : public std::enable_shared_from_this<FfmpegOutputStream>
{
public:
FfmpegOutputStream(const AVCodecID &encodecId, const AVCodecID &dncodecId);
@ -49,6 +50,13 @@ public:
private:
void GetDecodeDataCallback(AVFrame *frame);
/**
* @brief Create a thumbnail for the wrapped video.
* @param frame Pointer to the raw H.264 data containing the key frame.
* @param size Size of the data in bytes.
*/
void CreateThumbnailFile(const void *frame, const size_t &size);
void CreateThumbnailFileThread(const void *frame, const size_t &size);
private:
const AVCodecID mEncodecId;
@ -60,5 +68,6 @@ private:
std::function<void(AVFrame *)> mDecodeCallback;
std::function<void(AVPacket *)> mEncodeCallback;
bool mStreamHeaderWritten;
std::thread mThumbnailThread;
};
#endif

View File

@ -0,0 +1,179 @@
/*
* Copyright (c) 2023 Fancy Code.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "FfmpegThumbnail.h"
#include "ILog.h"
#include "LinuxApi.h"
#ifdef __cplusplus
extern "C" {
#endif
#include <libavcodec/avcodec.h>
#include <libavcodec/packet.h>
#include <libavformat/avformat.h>
#include <libavutil/avassert.h>
#include <libavutil/avutil.h>
#include <libavutil/channel_layout.h>
#include <libavutil/imgutils.h>
#include <libavutil/mathematics.h>
#include <libavutil/opt.h>
#include <libavutil/timestamp.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
#ifdef __cplusplus
}
#endif
#include <functional>
FfmpegThumbnail::FfmpegThumbnail(const AVCodecID &encodecId, const AVCodecID &dncodecId)
: mOutputFormat(nullptr), mStream(nullptr), sws_ctx(nullptr)
{
mDecoder = std::make_shared<FfmpegDecoder>(dncodecId);
mEncoder = std::make_shared<FfmpegEncoder>(encodecId);
}
void FfmpegThumbnail::Init(void)
{
LogInfo("FfmpegThumbnail Init\n");
mDecodeCallback = std::bind(&FfmpegThumbnail::GetDecodeDataCallback, this, std::placeholders::_1);
mEncodeCallback = std::bind(&FfmpegThumbnail::GetEncodeDataCallback, this, std::placeholders::_1);
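// NOTE: the scaling context below uses hard-coded 1920x2160 YUV420P geometry for this test; a full implementation should take it from the decoded frame.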
sws_ctx =
sws_getContext(1920, 2160, AV_PIX_FMT_YUV420P, 1920, 2160, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);
}
void FfmpegThumbnail::UnInit(void)
{
if (mDecoder) {
mDecoder->UnInit();
mDecoder.reset();
}
if (mEncoder) {
mEncoder->UnInit();
mEncoder.reset();
}
if (mOutputFormat && mOutputFormat->pb) {
av_write_trailer(mOutputFormat);
}
if (nullptr == mOutputFormat) {
return;
}
if (!(mOutputFormat->oformat->flags & AVFMT_NOFILE)) {
/* Close the output file. */
avio_closep(&mOutputFormat->pb);
}
avformat_free_context(mOutputFormat);
fx_system_v2("sync");
}
bool FfmpegThumbnail::CreateThumbnail(const std::string &outputFile, const void *data, const size_t &size)
{
if (!mDecoder || !mDecodeCallback) {
LogError("CreateThumbnail: mDecoder or mDecodeCallback is not initialized.\n");
return false;
}
AVDictionary *opt = nullptr;
int ret = 0;
/* allocate the output media context */
avformat_alloc_output_context2(&mOutputFormat, nullptr, "image2", outputFile.c_str());
if (!mOutputFormat) {
LogError("Could not deduce output format from file.\n");
return false;
}
/* Add the audio and video streams using the default format codecs
* and initialize the codecs. */
if (mOutputFormat->oformat->video_codec != AV_CODEC_ID_NONE) {
mStream = avformat_new_stream(mOutputFormat, nullptr);
if (!mStream) {
LogError("Could not allocate stream\n");
return false;
}
mStream->id = mOutputFormat->nb_streams - 1;
LogInfo("Create video stream\n");
// char *pData = (char *)data;
// for (size_t i = 0; i < size; i++) {
// if ((0x00 == pData[i]) && (0x00 == pData[i + 1]) && (0x00 == pData[i + 2]) && (0x01 == pData[i + 3]) &&
// (0x5 == (pData[i + 4] & 0x1F))) {
// uint8_t *extradata = (uint8_t *)av_mallocz(i + 1);
// if (!extradata) {
// LogError("Could not allocate extradata\n");
// return false;
// }
// LogInfo("Found extradata\n");
// memcpy(extradata, pData, i);
// mStream->codecpar->extradata = extradata;
// mStream->codecpar->extradata_size = i;
// }
// }
}
av_dump_format(mOutputFormat, 0, outputFile.c_str(), 1);
/* open the output file, if needed */
// if (!(mOutputFormat->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open(&mOutputFormat->pb, outputFile.c_str(), AVIO_FLAG_WRITE);
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Could not open '%s': %s\n",
outputFile.c_str(),
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
}
LogInfo("Open output file\n");
// }
av_dict_set_int(&opt, "use_editlist", 0, 0);
/* Write the stream header, if any. */
ret = avformat_write_header(mOutputFormat, &opt);
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Error occurred when opening output file: %s\n",
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
return false;
}
av_dict_free(&opt);
mDecoder->Init();
mEncoder->Init(mOutputFormat->flags);
mStream->time_base = mEncoder->GetTimeBase();
mEncoder->OpenEncoder(nullptr, mStream);
LogInfo("Start to decode data\n");
mDecoder->DecodeData(data, size, AV_NOPTS_VALUE, mDecodeCallback);
return true;
}
void FfmpegThumbnail::GetDecodeDataCallback(AVFrame *frame)
{
LogInfo("GetDecodeDataCallback\n");
AVFrame *output_frame = av_frame_alloc();
int jpeg_buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, frame->width, frame->height, 1);
LogInfo("jpeg_buf_size: %d\n", jpeg_buf_size);
uint8_t *jpeg_buf = (uint8_t *)av_malloc(jpeg_buf_size);
av_image_fill_arrays(
output_frame->data, output_frame->linesize, jpeg_buf, AV_PIX_FMT_YUV420P, frame->width, frame->height, 1);
// Perform pixel format conversion
sws_scale(sws_ctx, frame->data, frame->linesize, 0, 2160, output_frame->data, output_frame->linesize);
output_frame->format = AV_PIX_FMT_YUV420P;
output_frame->width = 1920;
output_frame->height = 2160;
if (mEncoder) {
mEncoder->EncodeData(output_frame, mStream, mEncodeCallback);
return;
}
}
void FfmpegThumbnail::GetEncodeDataCallback(AVPacket *pkt)
{
LogInfo("GetEncodeDataCallback, packet size: %d\n", pkt->size);
int ret = 0;
ret = av_interleaved_write_frame(mOutputFormat, pkt);
/* pkt is now blank (av_interleaved_write_frame() takes ownership of
* its contents and resets pkt), so that no unreferencing is necessary.
* This would be different if one used av_write_frame(). */
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Error while writing output packet: %s\n",
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
}
}

View File

@ -0,0 +1,61 @@
/*
* Copyright (c) 2023 Fancy Code.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FFMPEG_THUMBNAIL_H
#define FFMPEG_THUMBNAIL_H
#include "FfmpegDecoder.h"
#include "FfmpegEncoder.h"
#ifdef __cplusplus
extern "C" {
#endif
#include <libavcodec/avcodec.h>
#include <libavcodec/packet.h>
#include <libavformat/avformat.h>
#include <libavutil/avassert.h>
#include <libavutil/avutil.h>
#include <libavutil/channel_layout.h>
#include <libavutil/imgutils.h>
#include <libavutil/mathematics.h>
#include <libavutil/opt.h>
#include <libavutil/timestamp.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
#ifdef __cplusplus
}
#endif
#include <functional>
#include <memory>
class FfmpegThumbnail
{
public:
FfmpegThumbnail(const AVCodecID &encodecId, const AVCodecID &dncodecId);
virtual ~FfmpegThumbnail() = default;
void Init(void);
void UnInit(void);
bool CreateThumbnail(const std::string &outputFile, const void *data, const size_t &size);
private:
void GetDecodeDataCallback(AVFrame *frame);
void GetEncodeDataCallback(AVPacket *pkt);
private:
std::function<void(AVFrame *)> mDecodeCallback;
std::function<void(AVPacket *)> mEncodeCallback;
std::shared_ptr<FfmpegEncoder> mEncoder;
std::shared_ptr<FfmpegDecoder> mDecoder;
AVFormatContext *mOutputFormat;
AVStream *mStream;
struct SwsContext *sws_ctx;
};
#endif