Fixed: use-after-free of mStreamHandle.

The task-timer timeout path reset mStreamHandle before taking mMutex, so the frame-handling thread could still be dereferencing a handle that had just been destroyed. The reset now happens after mMutex.lock(), and FrameHandle() checks the handle for null before processing frames. The task timeout is also shortened from 20 s to 10 s, the audio branch in HandleListFrame() becomes an else-if, and the commented-out duplicates of the GetAVStream() logic are removed from the stream callbacks.

Fancy code 2024-07-11 11:32:03 +08:00
parent e038e2a9ff
commit 4c6cf898cf

@@ -122,7 +122,7 @@ void MediaHandle::StartTaskTimer(void)
 }
 void MediaHandle::TaskTimer(void)
 {
-    constexpr int TASK_TIME_OUT = 1000 * 20;
+    constexpr int TASK_TIME_OUT = 1000 * 10;
     mTaskRuning = true;
     while (mTaskRuning) {
         std::unique_lock<std::mutex> lock(mMutex);
@@ -141,8 +141,8 @@ void MediaHandle::TaskTimer(void)
     if (mCameraHal) {
         mCameraHal->StopTask();
     }
-    mStreamHandle.reset();
     mMutex.lock();
+    mStreamHandle.reset();
     mFirstFrameTimeStamp = MEDIA_TASK_NOT_START;
     auto runingTask = mCurrentTask.lock();
     if (mCurrentTask.expired()) {
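This hunk is the heart of the use-after-free fix: the timeout path destroyed mStreamHandle before taking mMutex, while FrameHandle() dereferences the same handle under that mutex on another thread. Below is a minimal, self-contained sketch of the rule the fix enforces (destroy the shared handle only while holding the mutex, and have the consumer null-check it under the same mutex). StreamHandle, ConsumeOneFrame and TearDown are illustrative stand-ins, not names from the real code.

// Sketch only: illustrates the locking rule behind the fix, not the real classes.
#include <cstdio>
#include <memory>
#include <mutex>

struct StreamHandle {
    void GetVideoStream(const void *data, int len) { std::printf("frame %p, %d bytes\n", data, len); }
};

std::mutex gMutex;
std::shared_ptr<StreamHandle> gHandle = std::make_shared<StreamHandle>();

// Consumer side (mirrors FrameHandle): dereference only under the mutex,
// and only after checking for null.
void ConsumeOneFrame(const void *data, int len)
{
    std::unique_lock<std::mutex> lock(gMutex);
    if (!gHandle) {
        return;  // handle already torn down, drop the frame
    }
    gHandle->GetVideoStream(data, len);
}

// Cleanup side (mirrors the TaskTimer timeout path): take the mutex first,
// then reset, so the consumer can never observe a destroyed handle.
void TearDown()
{
    std::lock_guard<std::mutex> lock(gMutex);
    gHandle.reset();
}

int main()
{
    char frame[16] = {};
    ConsumeOneFrame(frame, sizeof(frame));
    TearDown();
    ConsumeOneFrame(frame, sizeof(frame));  // safely dropped after teardown
    return 0;
}

With the old ordering, the reset could run concurrently with the consumer's dereference; putting both under the same mutex serializes them, which is exactly what moving mStreamHandle.reset() after mMutex.lock() does in the diff.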
@@ -172,6 +172,14 @@ void MediaHandle::FrameHandle(void)
         mCvFrameHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
             return !mTaskRuning || !mFrameList.empty();
         });
+        /**
+         * @brief
+         * NOTE: From here on the code will be re-locked.
+         */
+        if (!mStreamHandle) {
+            LogWarning("mStreamHandle is nullptr, will not handle frame.\n");
+            break;
+        }
         if (mFrameList.size() > 0) {
             HandleListFrame();
         }
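The new comment refers to a guarantee of std::condition_variable: wait_for() releases the lock while it blocks and re-acquires it before returning, so the added null check on mStreamHandle executes with mMutex held. A short compilable sketch of that consumer pattern, using stand-in names (m, cv, frames, running) rather than the real members:

// Sketch only: wait_for() unlocks while blocked and returns with the lock held,
// so shared state inspected right after it is protected again.
#include <chrono>
#include <condition_variable>
#include <deque>
#include <mutex>

std::mutex m;
std::condition_variable cv;
std::deque<int> frames;
bool running = true;

void ConsumerLoop()
{
    while (running) {
        std::unique_lock<std::mutex> lock(m);
        // The lock is dropped while waiting and re-acquired before wait_for() returns.
        cv.wait_for(lock, std::chrono::milliseconds(100), [&] {
            return !running || !frames.empty();
        });
        // From here on, `lock` holds `m` again, so it is safe to inspect shared
        // state (in the real code: mStreamHandle and mFrameList).
        if (!frames.empty()) {
            frames.pop_front();
        }
    }
}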
@@ -189,7 +197,7 @@ void MediaHandle::HandleListFrame(void)
     if (FrameType::VIDEO == frontFrame.mType) {
         mStreamHandle->GetVideoStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
     }
-    if (FrameType::AUDIO == frontFrame.mType) {
+    else if (FrameType::AUDIO == frontFrame.mType) {
         mStreamHandle->GetAudioStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
     }
     free(frontFrame.mData);
@@ -205,30 +213,10 @@ CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type)
 void MediaHandle::GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
 {
     GetAVStream(FrameType::VIDEO, stream, length, timeStamp);
-    // return;
-    // std::unique_lock<std::mutex> lock(mMutex);
-    // // mStreamHandle->GetVideoStream(stream, length, timeStamp);
-    // OneFrameStream addFrame;
-    // addFrame.mData = malloc(length);
-    // addFrame.mLength = length;
-    // memcpy(addFrame.mData, stream, length);
-    // addFrame.mType = FrameType::VIDEO;
-    // mFrameList.push_back(addFrame);
-    // mCvFrameHandle.notify_one();
 }
 void MediaHandle::GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
 {
     GetAVStream(FrameType::AUDIO, stream, length, timeStamp);
-    // return;
-    // std::unique_lock<std::mutex> lock(mMutex);
-    // // mStreamHandle->GetAudioStream(stream, length, timeStamp);
-    // OneFrameStream addFrame;
-    // addFrame.mData = malloc(length);
-    // addFrame.mLength = length;
-    // memcpy(addFrame.mData, stream, length);
-    // addFrame.mType = FrameType::AUDIO;
-    // mFrameList.push_back(addFrame);
-    // mCvFrameHandle.notify_one();
 }
 void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const int &length,
     const unsigned long long &timeStamp)
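The diff stops at the GetAVStream() signature, so its body is not shown here. Judging from the commented-out code this commit deletes from the two callbacks, it presumably copies the frame, appends it to mFrameList and wakes the consumer. The sketch below reconstructs that under those assumptions (including an assumed mTimeStamp field, since HandleListFrame() reads one), with minimal stand-in types so it compiles on its own; the real implementation may differ.

// Assumed shape of GetAVStream(), reconstructed from the commented-out code
// deleted above. The minimal types here exist only to make the sketch compile.
#include <condition_variable>
#include <cstdlib>
#include <cstring>
#include <list>
#include <mutex>

enum class FrameType { VIDEO, AUDIO };

struct OneFrameStream {
    void *mData = nullptr;
    int mLength = 0;
    FrameType mType = FrameType::VIDEO;
    unsigned long long mTimeStamp = 0;  // assumed: HandleListFrame() reads mTimeStamp
};

struct MediaHandleSketch {
    std::mutex mMutex;
    std::condition_variable mCvFrameHandle;
    std::list<OneFrameStream> mFrameList;

    void GetAVStream(const FrameType &type, const void *stream, const int &length,
                     const unsigned long long &timeStamp)
    {
        std::unique_lock<std::mutex> lock(mMutex);
        OneFrameStream addFrame;
        addFrame.mData = std::malloc(length);   // frame is copied; caller keeps its buffer
        addFrame.mLength = length;
        addFrame.mType = type;
        addFrame.mTimeStamp = timeStamp;
        std::memcpy(addFrame.mData, stream, length);
        mFrameList.push_back(addFrame);
        mCvFrameHandle.notify_one();            // wake the FrameHandle() consumer
    }
};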