# 1. ffmpeg Development Documentation

## 1.1. Using the ffplay command

* Play a G711a (A-law) audio file

```code
$ ffplay -f alaw -ar 8000 -ac 1 -i audio.g711a
```

* Play an h264 video file

```code
$ ffplay video.h264
```

* Convert a g711a audio file to a wav audio file

```code
$ ffmpeg -f alaw -ar 8000 -i audio.g711a audio.wav
```

* Play a pcm file

```code
$ ffplay -f s16le -ar 8000 -ac 1 test.pcm
```

* Generate a jpeg

```code
$ ffmpeg -i test.h264 -vframes 1 -vf "scale=640:480:force_original_aspect_ratio=decrease" -f image2 output.jpeg
```

```code
void FfmpegThumbnail::GetDecodeDataCallback(AVFrame *frame)
{
    LogInfo("GetDecodeDataCallback frame->width = %d, frame->height = %d\n", frame->width, frame->height);

    // Allocate the output frame that will hold the 640x480 YUV420P image
    AVFrame *output_frame = av_frame_alloc();
    output_frame->format = AV_PIX_FMT_YUV420P;
    output_frame->width = 640;
    output_frame->height = 480;

    // Calculate the buffer size required for a 640x480 YUV420P image
    int yuv_buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, 640, 480, 1);
    uint8_t *yuv_buf = (uint8_t *)av_malloc(yuv_buf_size);

    // Attach the buffer to the output frame's data/linesize arrays
    av_image_fill_arrays(output_frame->data, output_frame->linesize, yuv_buf,
                         AV_PIX_FMT_YUV420P, 640, 480, 1);

    // Create a SwsContext to scale the decoded frame down to 640x480 YUV420P
    SwsContext *sws_ctx = sws_getContext(frame->width, frame->height,
                                         static_cast<AVPixelFormat>(frame->format),
                                         output_frame->width, output_frame->height,
                                         AV_PIX_FMT_YUV420P,
                                         SWS_BILINEAR, nullptr, nullptr, nullptr);
    if (!sws_ctx) {
        LogError("Failed to create SwsContext for pixel format conversion\n");
        av_frame_free(&output_frame);
        av_free(yuv_buf);
        return;
    }

    // Perform the scaling / pixel format conversion
    sws_scale(sws_ctx, frame->data, frame->linesize, 0, frame->height,
              output_frame->data, output_frame->linesize);

    // Clean up the SwsContext
    sws_freeContext(sws_ctx);

    // Encode the YUV420P frame to JPEG using mEncoder
    if (mEncoder) {
        mEncoder->EncodeData(output_frame, mStream, mEncodeCallback);
    }

    // Free allocated resources
    av_frame_free(&output_frame);
    av_free(yuv_buf);
}
```

* Mux an h264 file and a wav file into an mp4 file

  **Note: no way was found to mux h264 and g711a files directly into an mp4.** The mp4 muxer does not accept G.711 A-law audio, so the audio has to be transcoded (here to AAC):

```code
$ ffmpeg -i video.h264 -i audio.wav -c:v copy -c:a aac -b:a 96k test.mp4
```

## 1.2. Problem Records

### 1.2.1. avformat_open_input fails

avformat_open_input returned -1094995529 (< 0), which is AVERROR_INVALIDDATA ("Invalid data found when processing input").

Solution: when building ffmpeg on Ubuntu, enable the configure options below and set --arch= to linux.

```code
# See //external/ffmpeg/CMakeLists.txt
set(CONFIGURE_COMMAND "--enable-cross-compile --target-os=linux --arch=linux \
    --cc=${CMAKE_C_COMPILER} \
    --cxx=${CMAKE_CXX_COMPILER} \
    --prefix=${EXTERNAL_LIBS_OUTPUT_PATH}/ffmpeg \
    --enable-parsers --enable-decoder=h264 \
    --enable-ffmpeg --enable-shared --enable-static \
    --enable-gpl --enable-nonfree --enable-version3 --enable-small \
    --enable-muxer=mov --enable-muxer=mp4 \
    --enable-decoder=aac \
    --enable-demuxer=mov \
    --enable-protocol=file --enable-bsf=aac_adtstoasc --enable-bsf=h264_mp4toannexb --enable-bsf=hevc_mp4toannexb")
```

### 1.2.2. avformat_open_input fails

Opening a g711a file reports invalid data:

**Invalid data found when processing input**

Solution: when calling avformat_open_input, pass the input format explicitly (the third parameter). A raw A-law stream has no header to probe, and its format name is alaw.

```code
// //utils/MediaBase/src/MediaBaseImpl.cpp
const AVInputFormat *iformat = av_find_input_format(InputFormat(mType));
AVFormatContext *pFormatCtx = nullptr;
if ((result = avformat_open_input(&pFormatCtx, path.c_str(), iformat, nullptr)) < 0) {
    char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
    av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, result);
    LogError("Couldn't open file: %s, result=%s\n", path.c_str(), error_str);
    return CreateStatusCode(STATUS_CODE_NOT_OK);
}
```
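The following is a minimal standalone sketch (not code from the project) tying the two problem records together: it forces the alaw demuxer for a raw G.711 A-law file and converts the numeric return code into a readable string with av_strerror. The file name audio.g711a and the 8000 Hz sample rate are assumptions taken from the commands above; in the project, InputFormat(mType) is what supplies the "alaw" name.

```code
// Sketch: open a raw G.711 A-law file by forcing the "alaw" demuxer and
// report errors via av_strerror(). Assumes an 8000 Hz input file.
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/dict.h>
}
#include <cstdio>

int main(int argc, char *argv[])
{
    const char *path = (argc > 1) ? argv[1] : "audio.g711a"; // assumed file name

    // A raw A-law stream has no header, so the format cannot be probed; force it.
    const AVInputFormat *iformat = av_find_input_format("alaw");
    if (!iformat) {
        fprintf(stderr, "alaw demuxer not available (check the ffmpeg build options)\n");
        return 1;
    }

    // The raw PCM demuxers default to 44100 Hz; override to 8000 Hz here.
    AVDictionary *opts = nullptr;
    av_dict_set(&opts, "sample_rate", "8000", 0);

    AVFormatContext *fmt_ctx = nullptr;
    int ret = avformat_open_input(&fmt_ctx, path, iformat, &opts);
    av_dict_free(&opts);
    if (ret < 0) {
        char err[AV_ERROR_MAX_STRING_SIZE] = {0};
        av_strerror(ret, err, sizeof(err));
        // Without the forced format this is where -1094995529 (AVERROR_INVALIDDATA) appears.
        fprintf(stderr, "avformat_open_input failed: %d (%s)\n", ret, err);
        return 1;
    }

    if ((ret = avformat_find_stream_info(fmt_ctx, nullptr)) < 0) {
        char err[AV_ERROR_MAX_STRING_SIZE] = {0};
        av_strerror(ret, err, sizeof(err));
        fprintf(stderr, "avformat_find_stream_info failed: %s\n", err);
        avformat_close_input(&fmt_ctx);
        return 1;
    }

    // Print the detected stream layout, as ffprobe/ffplay would.
    av_dump_format(fmt_ctx, 0, path, 0);
    avformat_close_input(&fmt_ctx);
    return 0;
}
```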