diff --git a/src/video/camerasource.cpp b/src/video/camerasource.cpp
index dd1beb143..a2b3be41f 100644
--- a/src/video/camerasource.cpp
+++ b/src/video/camerasource.cpp
@@ -337,7 +337,7 @@ void CameraSource::stream()
         }
 
         // Free the packet that was allocated by av_read_frame
-        av_free_packet(&packet);
+        av_packet_unref(&packet);
     };
 
     forever {
diff --git a/src/video/corevideosource.cpp b/src/video/corevideosource.cpp
index 0a71b9b14..0e76df947 100644
--- a/src/video/corevideosource.cpp
+++ b/src/video/corevideosource.cpp
@@ -20,6 +20,7 @@
 
 extern "C" {
 #include <libavcodec/avcodec.h>
+#include <libavutil/imgutils.h>
 }
 #include "corevideosource.h"
 #include "videoframe.h"
@@ -53,7 +54,8 @@ void CoreVideoSource::pushFrame(const vpx_image_t* vpxframe)
     avframe->height = height;
     avframe->format = AV_PIX_FMT_YUV420P;
 
-    buf = (uint8_t*)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, width, height));
+    int imgBufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 1);
+    buf = (uint8_t*)av_malloc(imgBufferSize);
     if (!buf)
     {
         av_frame_free(&avframe);
@@ -61,7 +63,9 @@ void CoreVideoSource::pushFrame(const vpx_image_t* vpxframe)
     }
     avframe->opaque = buf;
 
-    avpicture_fill((AVPicture*)avframe, buf, AV_PIX_FMT_YUV420P, width, height);
+    uint8_t** data = avframe->data;
+    int* linesize = avframe->linesize;
+    av_image_fill_arrays(data, linesize, buf, AV_PIX_FMT_YUV420P, width, height, 1);
 
     dstStride=avframe->linesize[0], srcStride=vpxframe->stride[0], minStride=std::min(dstStride, srcStride);
     for (int i=0; i<height; i++)
diff --git a/src/video/videoframe.cpp b/src/video/videoframe.cpp
--- a/src/video/videoframe.cpp
+++ b/src/video/videoframe.cpp
@@ -20,6 +20,7 @@
 
 extern "C" {
 #include <libavcodec/avcodec.h>
+#include <libavutil/imgutils.h>
 #include <libswscale/swscale.h>
 }
 #include "videoframe.h"
@@ -154,7 +155,8 @@ bool VideoFrame::convertToRGB24(QSize size)
         return false;
     }
 
-    uint8_t* buf = (uint8_t*)av_malloc(avpicture_get_size(AV_PIX_FMT_RGB24, size.width(), size.height()));
+    int imgBufferSize = av_image_get_buffer_size(AV_PIX_FMT_RGB24, size.width(), size.height(), 1);
+    uint8_t* buf = (uint8_t*)av_malloc(imgBufferSize);
     if (!buf)
     {
         qCritical() << "av_malloc failed";
@@ -163,7 +165,9 @@
     }
 
     frameRGB24->opaque = buf;
-    avpicture_fill((AVPicture*)frameRGB24, buf, AV_PIX_FMT_RGB24, size.width(), size.height());
+    uint8_t** data = frameRGB24->data;
+    int* linesize = frameRGB24->linesize;
+    av_image_fill_arrays(data, linesize, buf, AV_PIX_FMT_RGB24, size.width(), size.height(), 1);
 
     frameRGB24->width = size.width();
     frameRGB24->height = size.height();
@@ -211,7 +215,8 @@ bool VideoFrame::convertToYUV420()
         return false;
     }
 
-    uint8_t* buf = (uint8_t*)av_malloc(avpicture_get_size(AV_PIX_FMT_RGB24, width, height));
+    int imgBufferSize = av_image_get_buffer_size(AV_PIX_FMT_RGB24, width, height, 1);
+    uint8_t* buf = (uint8_t*)av_malloc(imgBufferSize);
     if (!buf)
     {
         qCritical() << "av_malloc failed";
@@ -220,7 +225,9 @@ bool VideoFrame::convertToYUV420()
     }
     frameYUV420->opaque = buf;
 
-    avpicture_fill((AVPicture*)frameYUV420, buf, AV_PIX_FMT_YUV420P, width, height);
+    uint8_t** data = frameYUV420->data;
+    int* linesize = frameYUV420->linesize;
+    av_image_fill_arrays(data, linesize, buf, AV_PIX_FMT_YUV420P, width, height, 1);
 
     SwsContext *swsCtx = sws_getContext(width, height, (AVPixelFormat)pixFmt,
                                         width, height, AV_PIX_FMT_YUV420P,