Mirror of https://github.com/RetroDECK/ES-DE.git
Fixed an issue in VideoFFmpegComponent that could lead to a crash.
Also renamed a function in VideoFFmpegComponent.
commit d9eda97ca6
parent b87d7238fc
VideoFFmpegComponent.cpp

@@ -181,7 +181,7 @@ void VideoFFmpegComponent::update(int deltaTime)
     if (!mEndOfVideo && mIsActuallyPlaying && mVideoFrameQueue.empty() && mAudioFrameQueue.empty())
         mEndOfVideo = true;
 
-    processFrames();
+    outputFrames();
 }
 
 void VideoFFmpegComponent::readFrames()
@@ -195,8 +195,9 @@ void VideoFFmpegComponent::readFrames()
            !avcodec_receive_frame(mVideoCodecContext, mVideoFrame)) {
 
            // We have a video frame that needs conversion to RGBA format.
-           int dst_linesize[4];
-           uint8_t* frameRGB[4];
+           uint8_t* frameRGBA[4];
+           int lineSizes[4];
+           int allocatedSize = 0;
 
            // The pts value is the presentation time, i.e. the time stamp when
            // the frame (picture) should be displayed.
@@ -210,21 +211,21 @@ void VideoFFmpegComponent::readFrames()
 
            // Conversion using libswscale. Bicubic interpolation gives a good
            // balance between speed and image quality.
-           struct SwsContext* conversionContext =
-                   sws_getContext(mVideoCodecContext->width,
-                   mVideoCodecContext->height,
+           struct SwsContext* conversionContext = sws_getContext(
+                   mVideoCodecContext->coded_width,
+                   mVideoCodecContext->coded_height,
                    mVideoCodecContext->pix_fmt,
-                   mVideoCodecContext->width,
-                   mVideoCodecContext->height,
+                   mVideoFrame->width,
+                   mVideoFrame->height,
                    AV_PIX_FMT_RGBA,
                    SWS_BICUBIC,
                    nullptr,
                    nullptr,
                    nullptr);
 
-           av_image_alloc(
-                   frameRGB,
-                   dst_linesize,
+           allocatedSize = av_image_alloc(
+                   frameRGBA,
+                   lineSizes,
                    mVideoFrame->width,
                    mVideoFrame->height,
                    AV_PIX_FMT_RGB32,
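
For readers unfamiliar with the libswscale flow this hunk touches, the sketch below shows the same pattern in isolation: create a scaler context, allocate a destination image, scale, copy, then free. It is a simplified approximation and not the actual ES-DE code; the helper name convertFrameToRGBA(), the std::vector return type, the align value of 1 passed to av_image_alloc(), and the use of AV_PIX_FMT_RGBA for the allocation (the commit itself allocates with AV_PIX_FMT_RGB32, also 4 bytes per pixel) are assumptions for illustration only.

// Minimal sketch of the conversion flow used in readFrames(); names and the
// align value are illustrative assumptions, not taken from the commit.
#include <cstdint>
#include <vector>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libavutil/mem.h>
#include <libswscale/swscale.h>
}

std::vector<uint8_t> convertFrameToRGBA(AVCodecContext* codecContext, AVFrame* frame)
{
    uint8_t* frameRGBA[4];
    int lineSizes[4];

    // Scale from the decoder's coded dimensions to the frame's display size,
    // converting to RGBA with bicubic interpolation (as in the commit).
    struct SwsContext* conversionContext = sws_getContext(
            codecContext->coded_width, codecContext->coded_height,
            codecContext->pix_fmt, frame->width, frame->height,
            AV_PIX_FMT_RGBA, SWS_BICUBIC, nullptr, nullptr, nullptr);
    if (conversionContext == nullptr)
        return {};

    // av_image_alloc() returns the number of bytes it allocated (or a negative
    // error code); keeping that value around lets later copies stay in bounds.
    const int allocatedSize = av_image_alloc(frameRGBA, lineSizes, frame->width,
                                             frame->height, AV_PIX_FMT_RGBA, 1);
    if (allocatedSize < 0) {
        sws_freeContext(conversionContext);
        return {};
    }

    sws_scale(conversionContext, const_cast<uint8_t const* const*>(frame->data),
              frame->linesize, 0, codecContext->coded_height, frameRGBA, lineSizes);

    // Copy only what was actually allocated, then release FFmpeg resources.
    std::vector<uint8_t> rgba(frameRGBA[0], frameRGBA[0] + allocatedSize);
    av_freep(&frameRGBA[0]);
    sws_freeContext(conversionContext);
    return rgba;
}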
@@ -235,22 +236,23 @@ void VideoFFmpegComponent::readFrames()
                    const_cast<uint8_t const* const*>(mVideoFrame->data),
                    mVideoFrame->linesize,
                    0,
-                   mVideoCodecContext->height,
-                   frameRGB,
-                   dst_linesize);
+                   mVideoCodecContext->coded_height,
+                   frameRGBA,
+                   lineSizes);
 
            VideoFrame currFrame;
 
            // Save the frame into the queue for later processing.
            currFrame.width = mVideoFrame->width;
            currFrame.height = mVideoFrame->height;
-           currFrame.frameRGBA.insert(currFrame.frameRGBA.begin(), &frameRGB[0][0],
-                   &frameRGB[0][currFrame.width * currFrame.height * 4]);
+           currFrame.frameRGBA.insert(currFrame.frameRGBA.begin(),
+                   &frameRGBA[0][0], &frameRGBA[0][allocatedSize - 1]);
            currFrame.pts = pts;
 
            mVideoFrameQueue.push(currFrame);
 
-           av_freep(&frameRGB[0]);
+           av_freep(&frameRGBA[0]);
            sws_freeContext(conversionContext);
            av_packet_unref(mPacket);
            break;
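
The change to the copy into currFrame.frameRGBA appears to be the crash fix named in the commit message: the old code derived the copy length from the display dimensions (width * height * 4), while the new code bounds it by the byte count that av_image_alloc() reported. The commit does not state the exact trigger, but one plausible cause (an assumption, not taken from the commit) is that the allocated image can be larger than width * height * 4 when line sizes are aligned. The standalone snippet below is purely illustrative, not ES-DE code, and simply demonstrates that mismatch.

// Illustrative only: shows how an aligned av_image_alloc() buffer can exceed
// width * height * 4, which is why the returned size is the safer copy bound.
#include <cstdint>
#include <cstdio>

extern "C" {
#include <libavutil/imgutils.h>
#include <libavutil/mem.h>
}

int main()
{
    uint8_t* data[4];
    int lineSizes[4];
    const int width = 250; // deliberately not a multiple of the alignment
    const int height = 100;

    const int allocatedSize =
            av_image_alloc(data, lineSizes, width, height, AV_PIX_FMT_RGBA, 32);
    if (allocatedSize < 0)
        return 1;

    // With align = 32, linesize[0] is rounded up past width * 4, so the
    // allocation is larger than the dimension-derived byte count.
    std::printf("width * 4 = %d, linesize[0] = %d, allocated = %d bytes\n",
                width * 4, lineSizes[0], allocatedSize);
    av_freep(&data[0]);
    return 0;
}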
@@ -406,7 +408,7 @@ void VideoFFmpegComponent::readFrames()
        }
    }
 }
 
-void VideoFFmpegComponent::processFrames()
+void VideoFFmpegComponent::outputFrames()
 {
    // Check if we should start counting the time (i.e. start playing the video).
    // The audio stream controls when the playback and time counting starts, assuming
VideoFFmpegComponent.h

@@ -51,8 +51,10 @@ private:
     void render(const Transform4x4f& parentTrans) override;
     void update(int deltaTime) override;
 
+    // Read frames from the video file and perform format conversion.
     void readFrames();
-    void processFrames();
+    // Output frames to AudioManager and to the video surface.
+    void outputFrames();
 
     void calculateBlackRectangle();
 