ffmpeg open audio ok.
This commit is contained in:
parent f94c292ff4
commit 680996176b

external/ffmpeg/CMakeLists.txt (vendored), 3 changes
@@ -8,7 +8,7 @@ if(${TARGET_PLATFORM} MATCHES ${DEFINE_LINUX})
     --enable-ffmpeg --enable-static \
     --enable-gpl --enable-nonfree --enable-version3 --enable-small \
     --enable-muxer=mov --enable-muxer=mp4 \
-    --enable-decoder=aac \
+    --enable-decoder=aac --enable-decoder=pcm_alaw --enable-encoder=pcm_alaw \
     --enable-demuxer=mov \
     --enable-protocol=file --enable-bsf=aac_adtstoasc --enable-bsf=h264_mp4toannexb --enable-bsf=hevc_mp4toannexb")
 else()

@@ -34,6 +34,7 @@ endif()
 message("Compile ffmpeg comand : ${CONFIGURE_COMMAND}")
 add_custom_target(
     ffmpeg
+    COMMAND echo "Now compile ffmpeg, please wait..."
     COMMAND test -f ${EXTERNAL_SOURCE_PATH}/ffmpeg/Makefile || tar -xf ffmpeg_6.1.1.orig.tar.xz
     COMMAND chmod 777 -R ffmpeg-6.1.1
     COMMAND cd ffmpeg-6.1.1 && bash -c "./configure ${CONFIGURE_COMMAND}"
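
The flags above add the raw A-law codec to the vendored build. A minimal sketch (not part of the commit) that asks the resulting libavcodec whether the codec was compiled in; only public FFmpeg calls are used, everything else is illustrative:

```cpp
// Sanity check for --enable-decoder=pcm_alaw / --enable-encoder=pcm_alaw.
extern "C" {
#include <libavcodec/avcodec.h>
}
#include <cstdio>

int main()
{
    const AVCodec *dec = avcodec_find_decoder(AV_CODEC_ID_PCM_ALAW);
    const AVCodec *enc = avcodec_find_encoder(AV_CODEC_ID_PCM_ALAW);
    std::printf("pcm_alaw decoder: %s, encoder: %s\n",
                dec ? "available" : "missing",
                enc ? "available" : "missing");
    return (dec && enc) ? 0 : 1;
}
```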

external/ffmpeg/README.md (vendored), 3 changes
@@ -7,6 +7,7 @@
 ```code
 $ ffplay -i audio.g711a -f alaw -ac 1 -ar 8000
 ```
+ffmpeg -i input.g711a -acodec alaw output.wav

 * Play the h264 video file


@@ -35,4 +36,4 @@ set(CONFIGURE_COMMAND "--enable-cross-compile --target-os=linux --arch=linux \
     --enable-decoder=aac \
     --enable-demuxer=mov \
     --enable-protocol=file --enable-bsf=aac_adtstoasc --enable-bsf=h264_mp4toannexb --enable-bsf=hevc_mp4toannexb")
 ```

@@ -50,6 +50,7 @@ void SaveStream::GetVideoStream(const void *stream, const unsigned int &length,
 void SaveStream::GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp)
 {
     if (mFileAudio) {
+        // LogInfo("Get audio stream, length: %d\n", length);
         size_t writeLength = fwrite(stream, 1, length, mFileAudio);
         if (writeLength != length) {
             LogError("Write video stream failed.\n");

@@ -45,11 +45,11 @@ endif()

 set(TARGET_NAME HuntingCameraTest)
 add_executable(${TARGET_NAME} ${SRC_FILES_MAIN} ${SRC_FILES})
-target_link_libraries(${TARGET_NAME} -Wl,--start-group HuntingMainLib MissionManagerTestTool McuManagerTestTool McuAskBaseTestTool
-                      AppManagerTestTool HalTestTool DeviceManagerTestTool TestManager
-                      MediaBase avformat avcodec avutil swresample avdevice avfilter swscale postproc z
-                      -Wl,--end-group
+target_link_libraries(${TARGET_NAME}# -Wl,--start-group
+                      HuntingMainLib MissionManagerTestTool McuManagerTestTool McuAskBaseTestTool
+                      AppManagerTestTool HalTestTool DeviceManagerTestTool TestManager
+                      # -Wl,--end-group
                       gtest gmock pthread)
 if(${TEST_COVERAGE} MATCHES "true")
     target_link_libraries(${TARGET_NAME} gcov)
 endif()

@@ -19,7 +19,7 @@
 #include "StatusCode.h"
 #include <memory>
 CameraHalTest::CameraHalTest(const CameraType &cameraType)
-    : mCameraType(cameraType), mReadH264File(nullptr), mReadG711File(nullptr), mTaskRuning(false)
+    : mCameraType(cameraType), mReadH264File(nullptr), mReadG711aFile(nullptr), mTaskRuning(false)
 {
 }
 void CameraHalTest::Init(void)

@@ -32,12 +32,12 @@ void CameraHalTest::Init(void)
         mReadH264File = ICreateMediaBase(MEDIA_HANDLE_TYPE_READ_H264);
         ISetReadVideoCallback(mReadH264File, videCallback, this);
     }
-    if (nullptr == mReadG711File) {
+    if (nullptr == mReadG711aFile) {
         ReadAudioFileCallback audioCallback = [](const void *stream, const unsigned int length, void *context) -> void {
-            ((CameraHalTest *)context)->ReadDataFromH264File(stream, length);
+            ((CameraHalTest *)context)->ReadDataFromG711aFile(stream, length);
         };
-        mReadG711File = ICreateMediaBase(MEDIA_HANDLE_TYPE_READ_H264);
-        ISetReadVideoCallback(mReadG711File, audioCallback, this);
+        mReadG711aFile = ICreateMediaBase(MEDIA_HANDLE_TYPE_READ_G711A);
+        ISetReadVideoCallback(mReadG711aFile, audioCallback, this);
     }
 }
 void CameraHalTest::UnInit(void)

@@ -54,11 +54,11 @@ void CameraHalTest::UnInit(void)
         IMediaBaseFree(mReadH264File);
         mReadH264File = nullptr;
     }
-    if (mReadG711File) {
-        ISetReadAudioCallback(mReadG711File, nullptr, nullptr);
-        IStopReadFile(mReadG711File);
-        IMediaBaseFree(mReadG711File);
-        mReadG711File = nullptr;
+    if (mReadG711aFile) {
+        ISetReadAudioCallback(mReadG711aFile, nullptr, nullptr);
+        IStopReadFile(mReadG711aFile);
+        IMediaBaseFree(mReadG711aFile);
+        mReadG711aFile = nullptr;
     }
 }
 void CameraHalTest::SetCameraMonitor(std::shared_ptr<VCameraHalMonitor> &monitor)

@@ -95,8 +95,8 @@ StatusCode CameraHalTest::StopTask(void)
     if (nullptr != mReadH264File) {
         IStopReadFile(mReadH264File);
     }
-    if (nullptr != mReadG711File) {
-        IStopReadFile(mReadG711File);
+    if (nullptr != mReadG711aFile) {
+        IStopReadFile(mReadG711aFile);
     }
     mCv.notify_one();
     return CameraHal::StopTask();

@@ -112,8 +112,8 @@ void CameraHalTest::MockReportMediaStream(void)
     if (nullptr != mReadH264File) {
         IStartReadFile(mReadH264File, TEST_SOURCE_PATH "/support_test/video.h264");
     }
-    if (nullptr != mReadG711File) {
-        IStartReadFile(mReadG711File, TEST_SOURCE_PATH "/support_test/audio.g711a");
+    if (nullptr != mReadG711aFile) {
+        IStartReadFile(mReadG711aFile, TEST_SOURCE_PATH "/support_test/audio.g711a");
     }
     while (mTaskRuning) {
         std::unique_lock<std::mutex> lock(mMutex);

@@ -130,6 +130,10 @@ void CameraHalTest::ReadDataFromH264File(const void *stream, const unsigned int
 {
     GetVideoStream(stream, length, 0);
 }
+void CameraHalTest::ReadDataFromG711aFile(const void *stream, const unsigned int length)
+{
+    GetAudioStream(stream, length, 0);
+}
 CameraHalMock::CameraHalMock(const CameraType &cameraType) : CameraHalTest(cameraType)
 {
 }
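
Pulled together, the test hook now wires the A-law reader the same way it already wires the H.264 reader. A condensed usage sketch of the MediaBase C API as it appears in this diff; the callback body, context pointer and the missing include are placeholders:

```cpp
// Assumes the MediaBase public header (path not shown in this diff) that declares
// ICreateMediaBase, ISetReadAudioCallback, IStartReadFile, IStopReadFile, IMediaBaseFree.
static void AudioCallback(const void *stream, const unsigned int length, void *context)
{
    // Placeholder: a real consumer forwards the A-law frame, e.g. GetAudioStream(stream, length, 0).
    (void)stream; (void)length; (void)context;
}

void RunG711aReader(void)
{
    void *reader = ICreateMediaBase(MEDIA_HANDLE_TYPE_READ_G711A);
    ISetReadAudioCallback(reader, AudioCallback, nullptr); // pairs with the nullptr teardown in UnInit()
    IStartReadFile(reader, TEST_SOURCE_PATH "/support_test/audio.g711a");
    // ... frames arrive on the reader's thread, paced by packet duration ...
    IStopReadFile(reader);
    IMediaBaseFree(reader);
}
```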

@@ -38,13 +38,14 @@ protected:
 private:
     void MockReportMediaStream(void);
     void ReadDataFromH264File(const void *stream, const unsigned int length);
+    void ReadDataFromG711aFile(const void *stream, const unsigned int length);

 protected:
     const CameraType mCameraType;
     std::weak_ptr<VCameraHalMonitor> mMonitor;
     std::shared_ptr<CameraReportEvent> mFastBootEvent;
     void *mReadH264File;
-    void *mReadG711File;
+    void *mReadG711aFile;
     std::mutex mMutex;
     std::condition_variable mCv;
     bool mTaskRuning;

@@ -19,7 +19,7 @@ aux_source_directory(./src SRC_FILES)

 set(TARGET_NAME MediaBase)
 add_library(${TARGET_NAME} STATIC ${SRC_FILES})
-target_link_libraries(${TARGET_NAME} StatusCode Log)
+target_link_libraries(${TARGET_NAME} avformat avcodec avutil swresample avdevice avfilter swscale postproc z StatusCode Log)

 add_custom_target(
     MediaBase_code_check
@ -21,7 +21,7 @@ extern "C" {
|
||||||
enum MediaHandleType
|
enum MediaHandleType
|
||||||
{
|
{
|
||||||
MEDIA_HANDLE_TYPE_READ_H264 = 0,
|
MEDIA_HANDLE_TYPE_READ_H264 = 0,
|
||||||
MEDIA_HANDLE_TYPE_READ_G711,
|
MEDIA_HANDLE_TYPE_READ_G711A,
|
||||||
MEDIA_HANDLE_TYPE_END
|
MEDIA_HANDLE_TYPE_END
|
||||||
};
|
};
|
||||||
typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, void *);
|
typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, void *);
|
||||||
|
|
|

@@ -48,7 +48,7 @@ const char *GetMediaBaseModuleName(void)
 }
 std::shared_ptr<IMediaBase> *NewIMediaBase(const MediaHandleType &type)
 {
-    LogInfo("Create the midia base object.\n");
+    LogInfo("Create the midia base object, type = %d.\n", type);
     MeidaAdapter *impl = (MeidaAdapter *)malloc(sizeof(MeidaAdapter));
     MeidaAdapter tmp;
     memcpy((void *)impl, (void *)&tmp, sizeof(MeidaAdapter));

@@ -38,22 +38,17 @@ StatusCode MediaBaseImpl::StartReadFile(const std::string &path)
 {
     InitFfmpeg();
     int result = 0;
+    const AVInputFormat *iformat = av_find_input_format(InputFormat(mType));
     AVFormatContext *pFormatCtx = nullptr;
-    if ((result = avformat_open_input(&pFormatCtx, path.c_str(), nullptr, nullptr)) < 0) {
+    if ((result = avformat_open_input(&pFormatCtx, path.c_str(), iformat, nullptr)) < 0) {
         char error_str[AV_ERROR_MAX_STRING_SIZE];
         av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, result);
         LogError("Couldn't open file: %s, result=%s\n", path.c_str(), error_str);
-        // LogError("Couldn't open file: %s, result=%s\n", path.c_str(), av_err2str(result));
         return CreateStatusCode(STATUS_CODE_NOT_OK);
     }
-    // LogInfo("File: %s\n", pFormatCtx->filename);
-    // LogInfo("File format: %s\n", pFormatCtx->iformat->name);
-    // LogInfo("Duration: %ld\n", pFormatCtx->duration);
-    // for (int i = 0; i < pFormatCtx->nb_streams; i++) {
-    //     LogInfo("stream codec_type = %d\n", pFormatCtx->streams[i]->codecpar->codec_type);
-    // }
     if (avformat_find_stream_info(pFormatCtx, nullptr) < 0) {
         LogError("Couldn't find stream information.\n");
+        avformat_close_input(&pFormatCtx);
         return CreateStatusCode(STATUS_CODE_NOT_OK);
     }
     int mediaStreamIndex = -1;

@@ -65,6 +60,7 @@ StatusCode MediaBaseImpl::StartReadFile(const std::string &path)
     }
     if (mediaStreamIndex == -1) {
         LogError("Didn't find a stream.\n");
+        avformat_close_input(&pFormatCtx);
         return CreateStatusCode(STATUS_CODE_NOT_OK);
     }
     auto taskTimerThread = [=](std::shared_ptr<MediaBaseImpl> media) {

@@ -127,7 +123,7 @@ void MediaBaseImpl::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStreamI
             playTimeMs = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num * 1000) /
                          pFormatCtx->streams[mediaStreamIndex]->time_base.den;
             // LogInfo("Frame data address: %p, length: %zu\n", packet.data, packet.size);
-            // LogInfo("Play time ms:%d\n", playTimeMs);
+            LogInfo("Play time ms:%d\n", playTimeMs);
             ReadFrame(&packet);
             std::this_thread::sleep_for(std::chrono::milliseconds(playTimeMs));
         }

@@ -153,11 +149,25 @@ void MediaBaseImpl::MediaTypeConvert(void)
     case MediaHandleType::MEDIA_HANDLE_TYPE_READ_H264:
         mFFmpegMediaType = AVMEDIA_TYPE_VIDEO;
         break;
-    case MediaHandleType::MEDIA_HANDLE_TYPE_READ_G711:
+    case MediaHandleType::MEDIA_HANDLE_TYPE_READ_G711A:
         mFFmpegMediaType = AVMEDIA_TYPE_AUDIO;
         break;
     default:
         LogError("Unknown media type.\n");
         break;
     }
+}
+const char *MediaBaseImpl::InputFormat(const MediaHandleType &type)
+{
+    switch (type) {
+    case MEDIA_HANDLE_TYPE_READ_H264:
+        LogInfo("InputFormat: h264.\n");
+        return "h264";
+    case MEDIA_HANDLE_TYPE_READ_G711A:
+        LogInfo("InputFormat: alaw.\n");
+        return "alaw";
+    default:
+        LogError("Unknown media type.\n");
+        return nullptr;
+    }
 }
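
StartReadFile() is where the audio-open fix actually lives: a raw .g711a capture has no container header, so FFmpeg has to be told which demuxer to use, and both error paths now release the format context. A stand-alone sketch of the same sequence, assuming only public FFmpeg 6.x calls (the file name and messages are illustrative, not project code):

```cpp
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/error.h>
}
#include <cstdio>

int main()
{
    // Headerless A-law: force the "alaw" demuxer, as InputFormat(mType) does above.
    const AVInputFormat *iformat = av_find_input_format("alaw");
    AVFormatContext *ctx = nullptr;
    int ret = avformat_open_input(&ctx, "audio.g711a", iformat, nullptr);
    if (ret < 0) {
        char err[AV_ERROR_MAX_STRING_SIZE] = {0};
        av_make_error_string(err, sizeof(err), ret);
        std::fprintf(stderr, "open failed: %s\n", err);
        return 1;
    }
    if (avformat_find_stream_info(ctx, nullptr) < 0) {
        avformat_close_input(&ctx); // same leak fix as the commit: close on every error path
        return 1;
    }
    std::printf("format: %s, streams: %u\n", ctx->iformat->name, ctx->nb_streams);
    avformat_close_input(&ctx);
    return 0;
}
```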

@@ -43,9 +43,10 @@ private:
     void ReadFileThread(AVFormatContext *pFormatCtx, int video_stream_index);
     void ReadFrame(AVPacket *packet);
     void MediaTypeConvert(void);
+    const char *InputFormat(const MediaHandleType &type);

 private:
-    const MediaHandleType &mType;
+    const MediaHandleType mType;
     enum AVMediaType mFFmpegMediaType;
     ReadVideoFileCallback mReadVideoCallback;
     void *mReadVideoCallbackContext;
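
The header also turns mType from a reference member into a value. The commit message does not explain it, but a reference member is only valid while the referenced object lives; a hypothetical, self-contained illustration (names are not project code) of why copying the enum is the safer choice:

```cpp
#include <cstdio>

enum class HandleType { ReadH264, ReadG711a };

struct Reader {
    // With 'const HandleType &mType;' this would merely alias the caller's argument;
    // a value member keeps the handle type alive for the whole lifetime of Reader.
    const HandleType mType;
    explicit Reader(const HandleType &type) : mType(type) {}
};

Reader MakeReader()
{
    HandleType local = HandleType::ReadG711a; // would dangle if Reader stored a reference
    return Reader(local);
}

int main()
{
    Reader r = MakeReader();
    std::printf("type = %d\n", static_cast<int>(r.mType)); // safe: mType is a copy
    return 0;
}
```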

@@ -33,7 +33,7 @@ std::shared_ptr<MediaBaseMakePtr> &MediaBaseMakePtr::GetInstance(std::shared_ptr
 }
 std::shared_ptr<IMediaBase> MediaBaseMakePtr::CreateMediaBase(const MediaHandleType &type)
 {
-    LogInfo("MediaBaseMakePtr::CreateMediaBase.\n");
+    LogInfo("MediaBaseMakePtr::CreateMediaBase, type = %d.\n", type);
     auto tmp = std::make_shared<MediaBaseImpl>(type);
     return tmp;
 }

@@ -208,7 +208,9 @@ StatusCode WebServerInit(const WebServerParam webParam)
         return CreateStatusCode(STATUS_CODE_NOT_OK);
     }
     if (websListen(listen) < 0) {
-        return CreateStatusCode(STATUS_CODE_NOT_OK);
+        // TODO: delected memory leaks.
+        finished = 1;
+        goto EXIT;
     }
     if (nullptr != webParam.mHttpRequestHandle) {
         gHttpHandle = webParam.mHttpRequestHandle;

@@ -219,6 +221,7 @@ StatusCode WebServerInit(const WebServerParam webParam)
         websDefineHandler("AppGetThumbnail", 0, AppGetThumbnail, 0, 0);
         websAddRoute("/app/getthumbnail", "AppGetThumbnail", 0);
     }
+EXIT:
     websServiceEvents(&finished);
     logmsg(1, "Instructed to exit\n");
     websClose();
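
The WebServer change swaps an early return on websListen() failure for a jump to the common shutdown path, so the already-initialised server state is still torn down. A hedged, stand-alone sketch of that pattern against GoAhead's public API (endpoint, document root and route file are placeholders, not taken from the project):

```cpp
#include "goahead.h"

int RunWebServer(void)
{
    static int finished = 0;
    if (websOpen("web", "route.txt") < 0) {
        return -1;                  // nothing to unwind yet
    }
    if (websListen("http://*:8080") < 0) {
        finished = 1;               // websServiceEvents() returns immediately
        goto EXIT;                  // reuse the normal teardown instead of leaking
    }
    // ... websDefineHandler() / websAddRoute() registrations go here ...
EXIT:
    websServiceEvents(&finished);   // serves events until 'finished' is set
    websClose();                    // releases what websOpen()/websListen() set up
    return 0;
}
```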