mirror of https://github.com/qTox/qTox.git synced 2024-03-22 14:00:36 +08:00

Make the video code a singleton, fix multithreading bugs

There can now only be one CameraSource running.

Video frames are decoded in a dedicated thread, and then converted by each subscriber in that subscriber's own thread.
The CameraSource API is entirely thread-safe and controls the video decoding thread.

The video device only stays open as long as there are users subscribed to the CameraSource.

We use a dangerous combination of spinlocks and memory fences to keep things synchronized.
tux3 2015-06-26 17:38:53 +02:00
parent b132443c80
commit 2ab55568f2
8 changed files with 213 additions and 147 deletions
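
Before the per-file diffs, here is a minimal, self-contained sketch of the spinlock-plus-fence pattern the commit message describes. The type, variable, and function names below are illustrative stand-ins, not the commit's exact code; the real implementation lives in CameraSource.

#include <atomic>

struct CameraDevice {};                    // stand-in for the real device type

static std::atomic_bool biglock{false};    // spinlock: true while held
static CameraDevice* device = nullptr;     // plain pointer, published via fences

static void lockBig()
{
    bool expected = false;
    // Spin until biglock flips from false to true; the CAS can fail spuriously
    // and overwrites 'expected' on failure, so reset it every iteration.
    while (!biglock.compare_exchange_weak(expected, true))
        expected = false;
}

static void closeDeviceHoldingLock()
{
    device = nullptr;
    // Release fence so the stream thread sees the nullptr before the unlock below.
    std::atomic_thread_fence(std::memory_order_release);
    biglock = false;
}

static void streamIteration()
{
    lockBig();
    // Pairs with the release fence above before the non-atomic read of 'device'.
    std::atomic_thread_fence(std::memory_order_acquire);
    if (!device)
    {
        biglock = false;
        return;
    }
    // ... decode one frame from the device ...
    biglock = false;
}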

View File

@@ -127,8 +127,6 @@ Core::~Core()
if (!call.active)
continue;
hangupCall(call.callId);
if (call.camera)
delete call.camera;
}
deadifyTox();

View File

@@ -81,9 +81,9 @@ void Core::prepareCall(uint32_t friendId, int32_t callId, ToxAv* toxav, bool vid
if (calls[callId].videoEnabled)
{
calls[callId].videoSource = new CoreVideoSource;
calls[callId].camera = new CameraSource;
calls[callId].camera->subscribe();
connect(calls[callId].camera, &VideoSource::frameAvailable,
CameraSource& source = CameraSource::getInstance();
source.subscribe();
connect(&source, &VideoSource::frameAvailable,
[=](std::shared_ptr<VideoFrame> frame){sendCallVideo(callId,toxav,frame);});
}
@@ -119,16 +119,15 @@ void Core::onAvMediaChange(void* toxav, int32_t callId, void* core)
{
emit static_cast<Core*>(core)->avMediaChange(friendId, callId, true);
calls[callId].videoSource = new CoreVideoSource;
calls[callId].camera = new CameraSource;
calls[callId].camera->subscribe();
CameraSource& source = CameraSource::getInstance();
source.subscribe();
calls[callId].videoEnabled = true;
}
else // Audio call
{
emit static_cast<Core*>(core)->avMediaChange(friendId, callId, false);
calls[callId].videoEnabled = false;
delete calls[callId].camera;
calls[callId].camera = nullptr;
CameraSource::getInstance().unsubscribe();
calls[callId].videoSource->setDeleteOnClose(true);
calls[callId].videoSource = nullptr;
}
@@ -240,8 +239,7 @@ void Core::cleanupCall(int32_t callId)
calls[callId].sendAudioTimer->stop();
if (calls[callId].videoEnabled)
{
delete calls[callId].camera;
calls[callId].camera = nullptr;
CameraSource::getInstance().unsubscribe();
if (calls[callId].videoSource)
{
calls[callId].videoSource->setDeleteOnClose(true);

View File

@@ -48,7 +48,6 @@ struct ToxCall
bool muteVol;
ALuint alSource;
CoreVideoSource* videoSource;
CameraSource* camera;
};
struct ToxGroupCall

View File

@@ -32,25 +32,65 @@ extern "C" {
#include "cameradevice.h"
#include "videoframe.h"
CameraSource* CameraSource::instance{nullptr};
CameraSource::CameraSource()
: CameraSource{CameraDevice::getDefaultDeviceName()}
{
}
CameraSource::CameraSource(const QString deviceName)
: CameraSource{deviceName, VideoMode{0,0,0}}
{
}
CameraSource::CameraSource(const QString deviceName, VideoMode mode)
: deviceName{deviceName}, device{nullptr}, mode(mode),
cctx{nullptr}, videoStreamIndex{-1},
biglock{false}, freelistLock{false}, subscriptions{0}
: deviceName{"none"}, device{nullptr}, mode(VideoMode{0,0,0}),
cctx{nullptr}, cctxOrig{nullptr}, videoStreamIndex{-1},
biglock{false}, freelistLock{false},
isOpen{false}, subscriptions{0}
{
av_register_all();
avdevice_register_all();
}
isNull = (deviceName == "none");
CameraSource& CameraSource::getInstance()
{
if (!instance)
instance = new CameraSource();
return *instance;
}
void CameraSource::open()
{
open(CameraDevice::getDefaultDeviceName());
}
void CameraSource::open(const QString deviceName)
{
open(deviceName, VideoMode{0,0,0});
}
void CameraSource::open(const QString DeviceName, VideoMode Mode)
{
{
bool expected = false;
while (!biglock.compare_exchange_weak(expected, true))
expected = false;
}
if (DeviceName == deviceName && Mode == mode)
{
biglock = false;
return;
}
if (subscriptions)
closeDevice();
deviceName = DeviceName;
mode = Mode;
isOpen = (deviceName != "none");
if (subscriptions && isOpen)
openDevice();
biglock = false;
}
void CameraSource::close()
{
open("none");
}
CameraSource::~CameraSource()
@@ -62,7 +102,7 @@ CameraSource::~CameraSource()
expected = false;
}
if (isNull)
if (!isOpen)
{
biglock = false;
return;
@@ -86,6 +126,8 @@ CameraSource::~CameraSource()
for (int i=subscriptions; i; --i)
device->close();
device = nullptr;
// Memfence so the stream thread sees a nullptr device
std::atomic_thread_fence(std::memory_order_release);
biglock=false;
// Synchronize with our stream thread
@@ -102,81 +144,30 @@ bool CameraSource::subscribe()
expected = false;
}
if (isNull)
if (!isOpen)
{
biglock = false;
return true;
}
if (device)
{
device->open();
++subscriptions;
biglock = false;
return true;
}
// We need to create a new CameraDevice
AVCodec* codec;
if (mode)
device = CameraDevice::open(deviceName, mode);
else
device = CameraDevice::open(deviceName);
if (!device)
if (openDevice())
{
++subscriptions;
biglock = false;
return true;
}
else
{
while (device && !device->close()) {}
device = nullptr;
cctx = cctxOrig = nullptr;
videoStreamIndex = -1;
// Memfence so the stream thread sees a nullptr device
std::atomic_thread_fence(std::memory_order_release);
biglock = false;
qWarning() << "Failed to open device!";
return false;
}
// Find the first video stream
for (unsigned i=0; i<device->context->nb_streams; i++)
{
if(device->context->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
{
videoStreamIndex=i;
break;
}
}
if (videoStreamIndex == -1)
goto fail;
// Get a pointer to the codec context for the video stream
cctxOrig=device->context->streams[videoStreamIndex]->codec;
codec=avcodec_find_decoder(cctxOrig->codec_id);
if(!codec)
goto fail;
// Copy context, since we apparently aren't allowed to use the original
cctx = avcodec_alloc_context3(codec);
if(avcodec_copy_context(cctx, cctxOrig) != 0)
goto fail;
cctx->refcounted_frames = 1;
// Open codec
if(avcodec_open2(cctx, codec, nullptr)<0)
{
avcodec_free_context(&cctx);
goto fail;
}
if (streamFuture.isRunning())
qCritical() << "The stream thread is already running! Keeping the current one open.";
else
streamFuture = QtConcurrent::run(std::bind(&CameraSource::stream, this));
// Synchronize with our stream thread
while (!streamFuture.isRunning())
QThread::yieldCurrentThread();
++subscriptions;
biglock = false;
return true;
fail:
while (!device->close()) {}
biglock = false;
return false;
}
void CameraSource::unsubscribe()
@@ -188,8 +179,9 @@ void CameraSource::unsubscribe()
expected = false;
}
if (isNull)
if (!isOpen)
{
--subscriptions;
biglock = false;
return;
}
@@ -203,23 +195,7 @@ void CameraSource::unsubscribe()
if (--subscriptions == 0)
{
// Free all remaining VideoFrame
// Locking must be done precisely this way to avoid races
for (int i=0; i<freelist.size(); i++)
{
std::shared_ptr<VideoFrame> vframe = freelist[i].lock();
if (!vframe)
continue;
vframe->releaseFrame();
}
// Free our resources and close the device
videoStreamIndex = -1;
avcodec_free_context(&cctx);
avcodec_close(cctxOrig);
cctxOrig = nullptr;
device->close();
device = nullptr;
closeDevice();
biglock = false;
@@ -234,6 +210,101 @@ void CameraSource::unsubscribe()
}
}
bool CameraSource::openDevice()
{
qDebug() << "Opening device "<<deviceName;
if (device)
{
device->open();
return true;
}
// We need to create a new CameraDevice
AVCodec* codec;
if (mode)
device = CameraDevice::open(deviceName, mode);
else
device = CameraDevice::open(deviceName);
if (!device)
{
qWarning() << "Failed to open device!";
return false;
}
// We need to open the device as many times as we already have subscribers,
// otherwise the device could get closed while we still have subscribers
for (int i=subscriptions; i>0; i--)
device->open();
// Find the first video stream
for (unsigned i=0; i<device->context->nb_streams; i++)
{
if(device->context->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
{
videoStreamIndex=i;
break;
}
}
if (videoStreamIndex == -1)
return false;
// Get a pointer to the codec context for the video stream
cctxOrig=device->context->streams[videoStreamIndex]->codec;
codec=avcodec_find_decoder(cctxOrig->codec_id);
if(!codec)
return false;
// Copy context, since we apparently aren't allowed to use the original
cctx = avcodec_alloc_context3(codec);
if(avcodec_copy_context(cctx, cctxOrig) != 0)
return false;
cctx->refcounted_frames = 1;
// Open codec
if(avcodec_open2(cctx, codec, nullptr)<0)
{
avcodec_free_context(&cctx);
return false;
}
if (streamFuture.isRunning())
qDebug() << "The stream thread is already running! Keeping the current one open.";
else
streamFuture = QtConcurrent::run(std::bind(&CameraSource::stream, this));
// Synchronize with our stream thread
while (!streamFuture.isRunning())
QThread::yieldCurrentThread();
return true;
}
void CameraSource::closeDevice()
{
qDebug() << "Closing device "<<deviceName;
// Free all remaining VideoFrame
// Locking must be done precisely this way to avoid races
for (int i=0; i<freelist.size(); i++)
{
std::shared_ptr<VideoFrame> vframe = freelist[i].lock();
if (!vframe)
continue;
vframe->releaseFrame();
}
// Free our resources and close the device
videoStreamIndex = -1;
avcodec_free_context(&cctx);
avcodec_close(cctxOrig);
cctxOrig = nullptr;
while (device && !device->close()) {}
device = nullptr;
// Memfence so the stream thread sees a nullptr device
std::atomic_thread_fence(std::memory_order_release);
}
void CameraSource::stream()
{
auto streamLoop = [=]()
@@ -283,6 +354,8 @@ void CameraSource::stream()
expected = false;
}
// The thread that sets device to null publishes it with a release fence, so we acquire here
std::atomic_thread_fence(std::memory_order_acquire);
if (!device)
{
biglock = false;
@@ -299,7 +372,7 @@ void CameraSource::stream()
void CameraSource::freelistCallback(int freelistIndex)
{
// Fast lock
// Fast spinlock
{
bool expected = false;
while (!freelistLock.compare_exchange_weak(expected, true))

View File

@@ -35,22 +35,32 @@ struct AVCodecContext;
* This class is a wrapper to share a camera's captured video frames
* It allows objects to subscribe to and unsubscribe from the stream, starting
* the camera and streaming new video frames only when needed.
* This is a singleton, since we can only capture from one
* camera at the same time without thread-safety issues.
* The source is lazy in the sense that it will only keep the video
* device open as long as there are subscribers; the source can be
* open but the device closed if there are zero subscribers.
**/
class CameraSource : public VideoSource
{
Q_OBJECT
public:
CameraSource(); ///< Opens the camera device in the settings, or the system default
CameraSource(const QString deviceName);
CameraSource(const QString deviceName, VideoMode mode);
~CameraSource();
static CameraSource& getInstance();
/// Opens the source for the camera device given as argument, for the device set in the settings, or for the system default.
/// If a device is already open, the source will seamlessly switch to the new device.
void open();
void open(const QString deviceName);
void open(const QString deviceName, VideoMode mode);
void close(); ///< Equivalent to opening the source with the video device "none". Stops streaming.
// VideoSource interface
virtual bool subscribe() override;
virtual void unsubscribe() override;
private:
CameraSource();
~CameraSource();
/// Blocking. Decodes video stream and emits new frames.
/// Designed to run in its own thread.
void stream();
@@ -64,17 +74,22 @@ private:
/// Get the index of a free slot in the freelist
/// Callers must hold the freelistLock
int getFreelistSlotLockless();
bool openDevice(); ///< Callers must own the biglock. Actually opens the video device and starts streaming.
void closeDevice(); ///< Callers must own the biglock. Actually closes the video device and stops streaming.
private:
QVector<std::weak_ptr<VideoFrame>> freelist; ///< Frames that need freeing before we can safely close the device
QFuture<void> streamFuture; ///< Future of the streaming thread
const QString deviceName; ///< Short name of the device for CameraDevice's open(QString)
CameraDevice* device; ///< Non-owning pointer to an open CameraDevice, or nullptr
QString deviceName; ///< Short name of the device for CameraDevice's open(QString)
CameraDevice* device; ///< Non-owning pointer to an open CameraDevice, or nullptr. Not atomic, synced with memfences when it becomes null.
VideoMode mode; ///< What mode we tried to open the device in, all zeros means default mode
AVCodecContext* cctx, *cctxOrig; ///< Codec context of the camera's selected video stream
int videoStreamIndex; ///< A camera can have multiple streams, this is the one we're decoding
std::atomic_bool biglock, freelistLock, isNull; ///< True when locked. Faster than mutexes for video decoding.
std::atomic_bool biglock, freelistLock; ///< True when locked. Faster than mutexes for video decoding.
std::atomic_bool isOpen;
std::atomic_int subscriptions; ///< Remember how many times we subscribed for RAII
static CameraSource* instance;
};
#endif // CAMERA_H
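
To make the new interface above concrete, a hypothetical caller outside this commit could drive the singleton roughly as follows. The device name string and the VideoMode values are placeholders, and VideoMode is assumed to carry width, height and FPS, as suggested by the AV settings code further below.

// Sketch of typical usage of the new singleton API; error handling omitted.
#include "camerasource.h"

static void demoCameraUsage()
{
    CameraSource& camera = CameraSource::getInstance();

    // Point the source at a device; an all-zero mode requests the default mode.
    camera.open("someDeviceName", VideoMode{640, 480, 30});

    // The device only starts streaming once somebody subscribes.
    if (camera.subscribe())
    {
        // ... connect to VideoSource::frameAvailable and consume frames ...
        camera.unsubscribe();   // the device closes again when the last subscriber leaves
    }

    // Equivalent to open("none"); stops streaming for everyone.
    camera.close();
}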

View File

@@ -25,6 +25,7 @@ extern "C" {
#include <libswscale/swscale.h>
}
#include "videoframe.h"
#include "camerasource.h"
VideoFrame::VideoFrame(AVFrame* frame, int w, int h, int fmt, std::function<void()> freelistCallback)
: freelistCallback{freelistCallback},
@@ -106,7 +107,7 @@ bool VideoFrame::convertToRGB24(QSize size)
}
else
{
qCritical() << "None of the frames are valid! Did someone release us?";
qWarning() << "None of the frames are valid! Did someone release us?";
return false;
}

View File

@@ -42,7 +42,7 @@
AVForm::AVForm() :
GenericForm(QPixmap(":/img/settings/av.png")),
camVideoSurface{nullptr}, camera{nullptr}
camVideoSurface{nullptr}, camera{CameraSource::getInstance()}
{
bodyUI = new Ui::AVSettings;
bodyUI->setupUi(this);
@@ -76,11 +76,6 @@ AVForm::~AVForm()
{
Translator::unregister(this);
delete bodyUI;
if (camera)
{
delete camera;
camera = nullptr;
}
}
void AVForm::showEvent(QShowEvent*)
@@ -107,11 +102,7 @@ void AVForm::on_videoModescomboBox_currentIndexChanged(int index)
QString devName = videoDeviceList[devIndex].first;
VideoMode mode = videoModes[index];
Settings::getInstance().setCamVideoRes(QSize(mode.width, mode.height));
camVideoSurface->setSource(nullptr);
if (camera)
delete camera;
camera = new CameraSource(devName, mode);
camVideoSurface->setSource(camera);
camera.open(devName, mode);
}
void AVForm::updateVideoModes(int curIndex)
@@ -128,7 +119,7 @@ void AVForm::updateVideoModes(int curIndex)
{return a.width!=b.width ? a.width>b.width :
a.height!=b.height ? a.height>b.height :
a.FPS>b.FPS;});
bodyUI->videoModescomboBox->blockSignals(true);
bool previouslyBlocked = bodyUI->videoModescomboBox->blockSignals(true);
bodyUI->videoModescomboBox->clear();
int prefResIndex = -1;
QSize prefRes = Settings::getInstance().getCamVideoRes();
@@ -148,7 +139,7 @@ void AVForm::updateVideoModes(int curIndex)
}
if (videoModes.isEmpty())
bodyUI->videoModescomboBox->addItem(tr("Default resolution"));
bodyUI->videoModescomboBox->blockSignals(false);
bodyUI->videoModescomboBox->blockSignals(previouslyBlocked);
if (prefResIndex != -1)
{
bodyUI->videoModescomboBox->setCurrentIndex(prefResIndex);
@@ -197,17 +188,12 @@ void AVForm::onVideoDevChanged(int index)
qWarning() << "Invalid index";
return;
}
camVideoSurface->setSource(nullptr);
if (camera)
{
delete camera;
camera = nullptr;
}
QString dev = videoDeviceList[index].first;
Settings::getInstance().setVideoDev(dev);
bool previouslyBlocked = bodyUI->videoModescomboBox->blockSignals(true);
updateVideoModes(index);
camera = new CameraSource(dev);
camVideoSurface->setSource(camera);
bodyUI->videoModescomboBox->blockSignals(previouslyBlocked);
camera.open(dev);
}
void AVForm::onResProbingFinished(QList<QSize> res)
@@ -240,11 +226,6 @@ void AVForm::hideEvent(QHideEvent *)
camVideoSurface->setSource(nullptr);
killVideoSurface();
}
if (camera)
{
delete camera;
camera = nullptr;
}
videoDeviceList.clear();
}
@@ -264,8 +245,8 @@ void AVForm::getVideoDevices()
}
//addItem changes currentIndex -> reset
bodyUI->videoDevCombobox->setCurrentIndex(-1);
bodyUI->videoDevCombobox->blockSignals(false);
bodyUI->videoDevCombobox->setCurrentIndex(videoDevIndex);
bodyUI->videoDevCombobox->blockSignals(false);
updateVideoModes(videoDevIndex);
}
@@ -382,6 +363,7 @@ void AVForm::createVideoSurface()
camVideoSurface = new VideoSurface(bodyUI->CamFrame);
camVideoSurface->setObjectName(QStringLiteral("CamVideoSurface"));
camVideoSurface->setMinimumSize(QSize(160, 120));
camVideoSurface->setSource(&camera);
bodyUI->gridLayout->addWidget(camVideoSurface, 0, 0, 1, 1);
}

View File

@@ -75,7 +75,7 @@
private:
Ui::AVSettings *bodyUI;
VideoSurface* camVideoSurface;
CameraSource* camera;
CameraSource& camera;
QVector<QPair<QString, QString>> videoDeviceList;
QVector<VideoMode> videoModes;
};