Mirror of https://github.com/qTox/qTox.git

docs(audio, video): Change comment style

Diadlo 2016-07-27 01:18:57 +03:00
parent 3edd84c4d8
commit 29443040fb
GPG Key ID: 5AF9F2E29107C727
15 changed files with 357 additions and 128 deletions

View File: audio.cpp

@@ -33,8 +33,6 @@
 #include <cassert>
 /**
-@internal
 @class Audio::Private
 @brief Encapsulates private audio framework from public qTox Audio API.
@@ -88,7 +86,28 @@ private:
 };
 /**
-Returns the singleton instance.
+@class Audio
+@fn void Audio::frameAvailable(const int16_t *pcm, size_t sample_count, uint8_t channels, uint32_t sampling_rate);
+When there are input subscribers, we regularly emit captured audio frames with this signal
+Always connect with a blocking queued connection lambda, else the behaviour is undefined
+@var Audio::AUDIO_SAMPLE_RATE
+@brief The next best Opus would take is 24k
+@var Audio::AUDIO_FRAME_DURATION
+@brief In milliseconds
+@var Audio::AUDIO_FRAME_SAMPLE_COUNT
+@brief Frame sample count
+@var Audio::AUDIO_CHANNELS
+@brief Ideally, we'd auto-detect, but that's a sane default
+*/
+/**
+@brief Returns the singleton instance.
 */
 Audio& Audio::getInstance()
 {
@@ -150,7 +169,7 @@ void Audio::checkAlcError(ALCdevice *device) noexcept
 }
 /**
-Returns the current output volume (between 0 and 1)
+@brief Returns the current output volume (between 0 and 1)
 */
 qreal Audio::outputVolume() const
 {
@@ -168,7 +187,7 @@ qreal Audio::outputVolume() const
 }
 /**
-Set the master output volume.
+@brief Set the master output volume.
 @param[in] volume the master volume (between 0 and 1)
 */
@@ -238,7 +257,7 @@ qreal Audio::inputGain() const
 }
 /**
-Set the input gain dB level.
+@brief Set the input gain dB level.
 */
 void Audio::setInputGain(qreal dB)
 {
@@ -299,7 +318,7 @@ void Audio::unsubscribeInput()
 }
 /**
-Initialize audio input device, if not initialized.
+@brief Initialize audio input device, if not initialized.
 @return true, if device was initialized; false otherwise
 */
@@ -309,7 +328,7 @@ bool Audio::autoInitInput()
 }
 /**
-Initialize audio output device, if not initialized.
+@brief Initialize audio output device, if not initialized.
 @return true, if device was initialized; false otherwise
 */
@@ -354,9 +373,7 @@ bool Audio::initInput(const QString& deviceName)
 }
 /**
-@internal
-Open an audio output device
+@brief Open an audio output device
 */
 bool Audio::initOutput(const QString& deviceName)
 {
@@ -409,7 +426,7 @@ bool Audio::initOutput(const QString& deviceName)
 }
 /**
-Play a 44100Hz mono 16bit PCM sound from a file
+@brief Play a 44100Hz mono 16bit PCM sound from a file
 */
 void Audio::playMono16Sound(const QString& path)
 {
@@ -419,7 +436,7 @@ void Audio::playMono16Sound(const QString& path)
 }
 /**
-Play a 44100Hz mono 16bit PCM sound
+@brief Play a 44100Hz mono 16bit PCM sound
 */
 void Audio::playMono16Sound(const QByteArray& data)
 {
@@ -488,9 +505,7 @@ void Audio::playAudioBuffer(ALuint alSource, const int16_t *data, int samples, u
 }
 /**
-@internal
-Close active audio input device.
+@brief Close active audio input device.
 */
 void Audio::cleanupInput()
 {
@@ -506,9 +521,7 @@ void Audio::cleanupInput()
 }
 /**
-@internal
-Close active audio output device
+@brief Close active audio output device
 */
 void Audio::cleanupOutput()
 {
@@ -540,6 +553,9 @@ void Audio::cleanupOutput()
 }
 }
+/**
+@brief Called after a mono16 sound stopped playing
+*/
 void Audio::playMono16SoundCleanup()
 {
 QMutexLocker locker(&audioLock);
@@ -554,6 +570,9 @@ void Audio::playMono16SoundCleanup()
 }
 }
+/**
+@brief Called on the captureTimer events to capture audio
+*/
 void Audio::doCapture()
 {
 QMutexLocker lock(&audioLock);
@@ -583,7 +602,7 @@ void Audio::doCapture()
 }
 /**
-Returns true if the output device is open
+@brief Returns true if the output device is open
 */
 bool Audio::isOutputReady() const
 {

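For illustration, a minimal consumer of the frameAvailable contract documented above might look like the sketch below. It is not part of this commit; the FrameSink class, the include path, and the subscribeInput() call are assumptions based on the API shown in this diff. The pcm pointer is only valid for the duration of the emit, which is why the docs demand a blocking queued connection.

#include "src/audio/audio.h"   // assumed include path
#include <QObject>
#include <cstdint>
#include <vector>

class FrameSink : public QObject
{
    Q_OBJECT
public:
    explicit FrameSink(QObject* parent = nullptr)
        : QObject(parent)
    {
        Audio& audio = Audio::getInstance();
        // Blocking queued connection: the capture thread waits until the slot
        // has finished, so the pcm buffer stays valid while we read it.
        connect(&audio, &Audio::frameAvailable,
                this, &FrameSink::onFrame, Qt::BlockingQueuedConnection);
        audio.subscribeInput(); // capture only runs while input subscribers exist (assumed API)
    }

private slots:
    void onFrame(const int16_t* pcm, size_t sampleCount,
                 uint8_t channels, uint32_t samplingRate)
    {
        // Copy the samples out before returning; the buffer belongs to Audio.
        samples.assign(pcm, pcm + sampleCount * channels);
        rate = samplingRate;
    }

private:
    std::vector<int16_t> samples;
    uint32_t rate = 0;
};
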
View File: audio.h

@@ -42,13 +42,6 @@
 #include <AL/alext.h>
 #endif
-// Public default audio settings
-static constexpr uint32_t AUDIO_SAMPLE_RATE = 48000; ///< The next best Opus would take is 24k
-static constexpr uint32_t AUDIO_FRAME_DURATION = 20; ///< In milliseconds
-static constexpr ALint AUDIO_FRAME_SAMPLE_COUNT = AUDIO_FRAME_DURATION * AUDIO_SAMPLE_RATE/1000;
-static constexpr uint32_t AUDIO_CHANNELS = 2; ///< Ideally, we'd auto-detect, but that's a sane default
 class Audio : public QObject
 {
 Q_OBJECT
@@ -92,10 +85,15 @@ public:
 void playAudioBuffer(ALuint alSource, const int16_t *data, int samples,
 unsigned channels, int sampleRate);
+public:
+// Public default audio settings
+static constexpr uint32_t AUDIO_SAMPLE_RATE = 48000;
+static constexpr uint32_t AUDIO_FRAME_DURATION = 20;
+static constexpr ALint AUDIO_FRAME_SAMPLE_COUNT = AUDIO_FRAME_DURATION * AUDIO_SAMPLE_RATE/1000;
+static constexpr uint32_t AUDIO_CHANNELS = 2;
 signals:
 void groupAudioPlayed(int group, int peer, unsigned short volume);
-/// When there are input subscribers, we regularly emit captured audio frames with this signal
-/// Always connect with a blocking queued connection or a lambda, or the behavior is undefined
 void frameAvailable(const int16_t *pcm, size_t sample_count, uint8_t channels, uint32_t sampling_rate);
 private:
@@ -111,12 +109,9 @@ private:
 bool initOutput(const QString& outDevDescr);
 void cleanupInput();
 void cleanupOutput();
-/// Called after a mono16 sound stopped playing
 void playMono16SoundCleanup();
-/// Called on the captureTimer events to capture audio
 void doCapture();
 private:
 Private* d;

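As a quick sanity check of the constants this hunk moves into the class, the frame geometry works out as in the sketch below (added for illustration; it uses plain local constexpr values rather than the class members):

#include <cassert>
#include <cstdint>

int main()
{
    constexpr uint32_t sampleRate = 48000;     // Audio::AUDIO_SAMPLE_RATE
    constexpr uint32_t frameDurationMs = 20;   // Audio::AUDIO_FRAME_DURATION
    constexpr uint32_t channels = 2;           // Audio::AUDIO_CHANNELS
    constexpr uint32_t frameSampleCount = frameDurationMs * sampleRate / 1000;

    // 20 ms at 48 kHz is 960 samples per channel...
    static_assert(frameSampleCount == 960, "frame sample count");
    // ...so one stereo int16 frame carries 960 * 2 values, i.e. 3840 bytes.
    assert(frameSampleCount * channels * sizeof(int16_t) == 3840);
    return 0;
}
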
View File: cameradevice.cpp

@@ -38,6 +38,28 @@ extern "C" {
 #include "src/platform/camera/avfoundation.h"
 #endif
+/**
+@class CameraDevice
+Maintains an FFmpeg context for open camera devices,
+takes care of sharing the context accross users and closing
+the camera device when not in use. The device can be opened
+recursively, and must then be closed recursively
+*/
+/**
+@var const QString CameraDevice::devName
+@brief Short name of the device
+@var AVFormatContext* CameraDevice::context
+@brief Context of the open device, must always be valid
+@var std::atomic_int CameraDevice::refcount;
+@brief Number of times the device was opened
+*/
 QHash<QString, CameraDevice*> CameraDevice::openDevices;
 QMutex CameraDevice::openDeviceLock, CameraDevice::iformatLock;
 AVInputFormat* CameraDevice::iformat{nullptr};
@@ -103,6 +125,18 @@ out:
 return dev;
 }
+/**
+@brief Opens a device.
+Opens a device, creating a new one if needed
+If the device is alreay open in another mode, the mode
+will be ignored and the existing device is used
+If the mode does not exist, a new device can't be opened.
+@param devName Device name to open.
+@param mode Mode of device to open.
+@return CameraDevice if the device could be opened, nullptr otherwise.
+*/
 CameraDevice* CameraDevice::open(QString devName, VideoMode mode)
 {
 if (!getDefaultInputFormat())
@@ -205,11 +239,20 @@ CameraDevice* CameraDevice::open(QString devName, VideoMode mode)
 return dev;
 }
+/**
+@brief Opens the device again. Never fails
+*/
 void CameraDevice::open()
 {
 ++refcount;
 }
+/**
+@brief Closes the device. Never fails.
+@note If returns true, "this" becomes invalid.
+@return True, if device finally deleted (closed last reference),
+false otherwise (if other references exist).
+*/
 bool CameraDevice::close()
 {
 if (--refcount > 0)
@@ -223,6 +266,11 @@ bool CameraDevice::close()
 return true;
 }
+/**
+@brief Get raw device list
+@note Uses avdevice_list_devices
+@return Raw device list
+*/
 QVector<QPair<QString, QString>> CameraDevice::getRawDeviceListGeneric()
 {
 QVector<QPair<QString, QString>> devices;
@@ -234,11 +282,13 @@ QVector<QPair<QString, QString>> CameraDevice::getRawDeviceListGeneric()
 AVFormatContext *s;
 if (!(s = avformat_alloc_context()))
 return devices;
 if (!iformat->priv_class || !AV_IS_INPUT_DEVICE(iformat->priv_class->category))
 {
 avformat_free_context(s);
 return devices;
 }
 s->iformat = iformat;
 if (s->iformat->priv_data_size > 0)
 {
@@ -290,6 +340,11 @@ QVector<QPair<QString, QString>> CameraDevice::getRawDeviceListGeneric()
 return devices;
 }
+/**
+@brief Get device list with desciption
+@return A list of device names and descriptions.
+The names are the first part of the pair and can be passed to open(QString).
+*/
 QVector<QPair<QString, QString>> CameraDevice::getDeviceList()
 {
 QVector<QPair<QString, QString>> devices;
@@ -336,6 +391,11 @@ QVector<QPair<QString, QString>> CameraDevice::getDeviceList()
 return devices;
 }
+/**
+@brief Get the default device name.
+@return The short name of the default device
+This is either the device in the settings or the system default.
+*/
 QString CameraDevice::getDefaultDeviceName()
 {
 QString defaultdev = Settings::getInstance().getVideoDev();
@@ -354,11 +414,20 @@ QString CameraDevice::getDefaultDeviceName()
 return devlist[0].first;
 }
+/**
+@brief Checks if a device name specifies a display.
+@param devName Device name to check.
+@return True, if device is screen, false otherwise.
+*/
 bool CameraDevice::isScreen(const QString &devName)
 {
 return devName.startsWith("x11grab") || devName.startsWith("gdigrab");
 }
+/**
+@brief Get list of resolutions and position of screens
+@return Vector of avaliable screen modes with offset
+*/
 QVector<VideoMode> CameraDevice::getScreenModes()
 {
 QList<QScreen*> screens = QApplication::screens();
@@ -376,6 +445,11 @@ QVector<VideoMode> CameraDevice::getScreenModes()
 return result;
 }
+/**
+@brief Get the list of video modes for a device.
+@param devName Device name to get nodes from.
+@return Vector of available modes for the device.
+*/
 QVector<VideoMode> CameraDevice::getVideoModes(QString devName)
 {
 Q_UNUSED(devName);
@@ -401,6 +475,11 @@ QVector<VideoMode> CameraDevice::getVideoModes(QString devName)
 return {};
 }
+/**
+@brief Get the name of the pixel format of a video mode.
+@param pixel_format Pixel format to get the name from.
+@return Name of the pixel format.
+*/
 QString CameraDevice::getPixelFormatString(uint32_t pixel_format)
 {
 #ifdef Q_OS_LINUX
@@ -410,6 +489,13 @@ QString CameraDevice::getPixelFormatString(uint32_t pixel_format)
 #endif
 }
+/**
+@brief Compare two pixel formats.
+@param a First pixel format to compare.
+@param b Second pixel format to compare.
+@return True if we prefer format a to b,
+false otherwise (such as if there's no preference).
+*/
 bool CameraDevice::betterPixelFormat(uint32_t a, uint32_t b)
 {
 #ifdef Q_OS_LINUX
@@ -419,6 +505,10 @@ bool CameraDevice::betterPixelFormat(uint32_t a, uint32_t b)
 #endif
 }
+/**
+@brief Sets CameraDevice::iformat to default.
+@return True if success, false if failure.
+*/
 bool CameraDevice::getDefaultInputFormat()
 {
 QMutexLocker locker(&iformatLock);

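The recursive open/close contract documented above can be summarised with a short sketch (illustration only; it assumes the CameraDevice API exactly as declared in this commit and a device name obtained from getDeviceList()):

#include <QString>
#include "cameradevice.h"   // assumed include path

void probeDevice(const QString& name)
{
    CameraDevice* dev = CameraDevice::open(name); // first reference, may return nullptr
    if (!dev)
        return;

    dev->open();        // second reference, documented as never failing

    dev->close();       // back down to one reference, returns false
    if (dev->close())   // last reference: the device is really closed
        dev = nullptr;  // and "this" must not be used any more
}
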
View File: cameradevice.h

@@ -33,55 +33,36 @@ struct AVInputFormat;
 struct AVDeviceInfoList;
 struct AVDictionary;
-/// Maintains an FFmpeg context for open camera devices,
-/// takes care of sharing the context accross users
-/// and closing the camera device when not in use.
-/// The device can be opened recursively,
-/// and must then be closed recursively
 class CameraDevice
 {
 public:
-/// Opens a device, creating a new one if needed
-/// If the device is alreay open in another mode, the mode
-/// will be ignored and the existing device is used
-/// If the mode does not exist, a new device can't be opened
-/// Returns a nullptr if the device couldn't be opened
 static CameraDevice* open(QString devName, VideoMode mode = VideoMode());
-void open(); ///< Opens the device again. Never fails
-bool close(); ///< Closes the device. Never fails. If returns true, "this" becomes invalid
-/// Returns a list of device names and descriptions
-/// The names are the first part of the pair and can be passed to open(QString)
+void open();
+bool close();
 static QVector<QPair<QString, QString>> getDeviceList();
-/// Get the list of video modes for a device
 static QVector<VideoMode> getVideoModes(QString devName);
-/// Get the name of the pixel format of a video mode
 static QString getPixelFormatString(uint32_t pixel_format);
-/// Returns true if we prefer format a to b, false otherwise (such as if there's no preference)
 static bool betterPixelFormat(uint32_t a, uint32_t b);
-/// Returns the short name of the default defice
-/// This is either the device in the settings
-/// or the system default.
 static QString getDefaultDeviceName();
-/// Checks if a device name specifies a display
 static bool isScreen(const QString &devName);
 private:
 CameraDevice(const QString &devName, AVFormatContext *context);
 static CameraDevice* open(QString devName, AVDictionary** options);
-static bool getDefaultInputFormat(); ///< Sets CameraDevice::iformat, returns success/failure
-static QVector<QPair<QString, QString> > getRawDeviceListGeneric(); ///< Uses avdevice_list_devices
-static QVector<VideoMode> getScreenModes(); ///< Returns avaliable screen modes with offset
+static bool getDefaultInputFormat();
+static QVector<QPair<QString, QString> > getRawDeviceListGeneric();
+static QVector<VideoMode> getScreenModes();
 public:
-const QString devName; ///< Short name of the device
-AVFormatContext* context; ///< Context of the open device, must always be valid
+const QString devName;
+AVFormatContext* context;
 private:
-std::atomic_int refcount; ///< Number of times the device was opened
+std::atomic_int refcount;
 static QHash<QString, CameraDevice*> openDevices;
 static QMutex openDeviceLock, iformatLock;
 static AVInputFormat* iformat, *idesktopFormat;

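Putting the static helpers declared above together, a caller could enumerate capture devices roughly as in this sketch (illustration only; it assumes Qt and the CameraDevice/VideoMode types from this repository):

#include <QDebug>
#include <QPair>
#include <QString>
#include <QVector>
#include "cameradevice.h"   // assumed include path

void listCaptureDevices()
{
    const QVector<QPair<QString, QString>> devices = CameraDevice::getDeviceList();
    for (const QPair<QString, QString>& dev : devices)
    {
        // dev.first can be passed to open(QString); dev.second is the human-readable description.
        qDebug() << dev.first << ":" << dev.second;

        if (CameraDevice::isScreen(dev.first))
            qDebug() << "  (screen capture device)";
        else
            qDebug() << "  modes available:" << CameraDevice::getVideoModes(dev.first).size();
    }
}
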
View File: camerasource.cpp

@@ -33,6 +33,57 @@ extern "C" {
 #include "cameradevice.h"
 #include "videoframe.h"
+/**
+@class CameraSource
+@brief This class is a wrapper to share a camera's captured video frames
+It allows objects to suscribe and unsuscribe to the stream, starting
+the camera and streaming new video frames only when needed.
+This is a singleton, since we can only capture from one
+camera at the same time without thread-safety issues.
+The source is lazy in the sense that it will only keep the video
+device open as long as there are subscribers, the source can be
+open but the device closed if there are zero subscribers.
+*/
+/**
+@var QVector<std::weak_ptr<VideoFrame>> CameraSource::freelist
+@brief Frames that need freeing before we can safely close the device
+@var QFuture<void> CameraSource::streamFuture
+@brief Future of the streaming thread
+@var QString CameraSource::deviceName
+@brief Short name of the device for CameraDevice's open(QString)
+@var CameraDevice* CameraSource::device
+@brief Non-owning pointer to an open CameraDevice, or nullptr. Not atomic, synced with memfences when becomes null.
+@var VideoMode CameraSource::mode
+@brief What mode we tried to open the device in, all zeros means default mode
+@var AVCodecContext* CameraSource::cctx
+@brief Codec context of the camera's selected video stream
+@var AVCodecContext* CameraSource::cctxOrig
+@brief Codec context of the camera's selected video stream
+@var int CameraSource::videoStreamIndex
+@brief A camera can have multiple streams, this is the one we're decoding
+@var QMutex CameraSource::biglock
+@brief True when locked. Faster than mutexes for video decoding.
+@var QMutex CameraSource::freelistLock
+@brief True when locked. Faster than mutexes for video decoding.
+@var std::atomic_bool CameraSource::streamBlocker
+@brief Holds the streaming thread still when true
+@var std::atomic_int CameraSource::subscriptions
+@brief Remember how many times we subscribed for RAII
+*/
 CameraSource* CameraSource::instance{nullptr};
 CameraSource::CameraSource()
@@ -45,6 +96,9 @@ CameraSource::CameraSource()
 avdevice_register_all();
 }
+/**
+@brief Returns the singleton instance.
+*/
 CameraSource& CameraSource::getInstance()
 {
 if (!instance)
@@ -61,6 +115,12 @@ void CameraSource::destroyInstance()
 }
 }
+/**
+@brief Opens the source for the camera device.
+@note If a device is already open, the source will seamlessly switch to the new device.
+Opens the source for the camera device in argument, in the settings, or the system default.
+*/
 void CameraSource::open()
 {
 open(CameraDevice::getDefaultDeviceName());
@@ -103,6 +163,11 @@ void CameraSource::open(const QString& DeviceName, VideoMode Mode)
 streamBlocker = false;
 }
+/**
+@brief Stops streaming.
+Equivalent to opening the source with the video device "none".
+*/
 void CameraSource::close()
 {
 open("none");
@@ -214,6 +279,11 @@ void CameraSource::unsubscribe()
 subscriptions--;
 }
+/**
+@brief Opens the video device and starts streaming.
+@note Callers must own the biglock.
+@return True if success, false otherwise.
+*/
 bool CameraSource::openDevice()
 {
 qDebug() << "Opening device " << deviceName;
@@ -296,6 +366,10 @@ bool CameraSource::openDevice()
 return true;
 }
+/**
+@brief Closes the video device and stops streaming.
+@note Callers must own the biglock.
+*/
 void CameraSource::closeDevice()
 {
 qDebug() << "Closing device "<<deviceName;
@@ -324,6 +398,10 @@ void CameraSource::closeDevice()
 std::atomic_thread_fence(std::memory_order_release);
 }
+/**
+@brief Blocking. Decodes video stream and emits new frames.
+@note Designed to run in its own thread.
+*/
 void CameraSource::stream()
 {
 auto streamLoop = [=]()
@@ -384,12 +462,28 @@ void CameraSource::stream()
 }
 }
+/**
+@brief CameraSource::freelistCallback
+@param freelistIndex
+All VideoFrames must be deleted or released before we can close the device
+or the device will forcibly free them, and then ~VideoFrame() will double free.
+In theory very careful coding from our users could ensure all VideoFrames
+die before unsubscribing, even the ones currently in flight in the metatype system.
+But that's just asking for trouble and mysterious crashes, so we'll just
+maintain a freelist and have all VideoFrames tell us when they die so we can forget them.
+*/
 void CameraSource::freelistCallback(int freelistIndex)
 {
 QMutexLocker l{&freelistLock};
 freelist[freelistIndex].reset();
 }
+/**
+@brief Get the index of a free slot in the freelist.
+@note Callers must hold the freelistLock.
+@return Index of a free slot.
+*/
 int CameraSource::getFreelistSlotLockless()
 {
 int size = freelist.size();

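A minimal subscriber honouring the lazy-device behaviour documented above might look like the sketch below (illustration only; the PreviewSink class is not part of the commit and the include paths are assumptions):

#include <QObject>
#include <memory>
#include "camerasource.h"   // assumed include path
#include "videoframe.h"

class PreviewSink : public QObject
{
    Q_OBJECT
public:
    PreviewSink()
    {
        CameraSource& source = CameraSource::getInstance();
        connect(&source, &VideoSource::frameAvailable, this, &PreviewSink::onFrame);
        source.open();       // device from the settings, or the system default
        source.subscribe();  // the video device stays open only while subscribers exist
    }

    ~PreviewSink()
    {
        // Pair every subscribe() with an unsubscribe(), RAII-style.
        CameraSource::getInstance().unsubscribe();
    }

private slots:
    void onFrame(std::shared_ptr<VideoFrame> frame)
    {
        // Holding the shared_ptr keeps the frame buffers alive; dropping it
        // lets the freelist forget the frame so the device can close cleanly.
        last = frame;
    }

private:
    std::shared_ptr<VideoFrame> last;
};
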
View File: camerasource.h

@@ -31,17 +31,6 @@
 class CameraDevice;
 struct AVCodecContext;
-/**
- * This class is a wrapper to share a camera's captured video frames
- * It allows objects to suscribe and unsuscribe to the stream, starting
- * the camera and streaming new video frames only when needed.
- * This is a singleton, since we can only capture from one
- * camera at the same time without thread-safety issues.
- * The source is lazy in the sense that it will only keep the video
- * device open as long as there are subscribers, the source can be
- * open but the device closed if there are zero subscribers.
- **/
 class CameraSource : public VideoSource
 {
 Q_OBJECT
@@ -49,12 +38,10 @@ class CameraSource : public VideoSource
 public:
 static CameraSource& getInstance();
 static void destroyInstance();
-/// Opens the source for the camera device in argument, in the settings, or the system default
-/// If a device is already open, the source will seamlessly switch to the new device
 void open();
 void open(const QString& deviceName);
 void open(const QString& deviceName, VideoMode mode);
-void close(); ///< Equivalent to opening the source with the video device "none". Stops streaming.
+void close();
 bool isOpen();
 // VideoSource interface
@@ -67,34 +54,24 @@ signals:
 private:
 CameraSource();
 ~CameraSource();
-/// Blocking. Decodes video stream and emits new frames.
-/// Designed to run in its own thread.
 void stream();
-/// All VideoFrames must be deleted or released before we can close the device
-/// or the device will forcibly free them, and then ~VideoFrame() will double free.
-/// In theory very careful coding from our users could ensure all VideoFrames
-/// die before unsubscribing, even the ones currently in flight in the metatype system.
-/// But that's just asking for trouble and mysterious crashes, so we'll just
-/// maintain a freelist and have all VideoFrames tell us when they die so we can forget them.
 void freelistCallback(int freelistIndex);
-/// Get the index of a free slot in the freelist
-/// Callers must hold the freelistLock
 int getFreelistSlotLockless();
-bool openDevice(); ///< Callers must own the biglock. Actually opens the video device and starts streaming.
-void closeDevice(); ///< Callers must own the biglock. Actually closes the video device and stops streaming.
+bool openDevice();
+void closeDevice();
 private:
-QVector<std::weak_ptr<VideoFrame>> freelist; ///< Frames that need freeing before we can safely close the device
-QFuture<void> streamFuture; ///< Future of the streaming thread
-QString deviceName; ///< Short name of the device for CameraDevice's open(QString)
-CameraDevice* device; ///< Non-owning pointer to an open CameraDevice, or nullptr. Not atomic, synced with memfences when becomes null.
-VideoMode mode; ///< What mode we tried to open the device in, all zeros means default mode
-AVCodecContext* cctx, *cctxOrig; ///< Codec context of the camera's selected video stream
-int videoStreamIndex; ///< A camera can have multiple streams, this is the one we're decoding
-QMutex biglock, freelistLock; ///< True when locked. Faster than mutexes for video decoding.
+QVector<std::weak_ptr<VideoFrame>> freelist;
+QFuture<void> streamFuture;
+QString deviceName;
+CameraDevice* device;
+VideoMode mode;
+AVCodecContext* cctx, *cctxOrig;
+int videoStreamIndex;
+QMutex biglock, freelistLock;
 std::atomic_bool _isOpen;
-std::atomic_bool streamBlocker; ///< Holds the streaming thread still when true
-std::atomic_int subscriptions; ///< Remember how many times we subscribed for RAII
+std::atomic_bool streamBlocker;
+std::atomic_int subscriptions;
 static CameraSource* instance;
 };

View File: corevideosource.cpp

@@ -25,12 +25,34 @@ extern "C" {
 #include "corevideosource.h"
 #include "videoframe.h"
+/**
+@class CoreVideoSource
+@brief A VideoSource that emits frames received by Core.
+*/
+/**
+@var std::atomic_int subscribers
+@brief Number of suscribers
+@var std::atomic_bool deleteOnClose
+@brief If true, self-delete after the last suscriber is gone
+*/
+/**
+@brief CoreVideoSource constructor.
+@note Only CoreAV should create a CoreVideoSource since
+only CoreAV can push images to it.
+*/
 CoreVideoSource::CoreVideoSource()
 : subscribers{0}, deleteOnClose{false},
 stopped{false}
 {
 }
+/**
+@brief Makes a copy of the vpx_image_t and emits it as a new VideoFrame.
+@param vpxframe Frame to copy.
+*/
 void CoreVideoSource::pushFrame(const vpx_image_t* vpxframe)
 {
 if (stopped)
@@ -108,12 +130,22 @@ void CoreVideoSource::unsubscribe()
 biglock.unlock();
 }
+/**
+@brief Setup delete on close
+@param If true, self-delete after the last suscriber is gone
+*/
 void CoreVideoSource::setDeleteOnClose(bool newstate)
 {
 QMutexLocker locker(&biglock);
 deleteOnClose = newstate;
 }
+/**
+@brief Stopping the source.
+@see The callers in CoreAV for the rationale
+Stopping the source will block any pushFrame calls from doing anything
+*/
 void CoreVideoSource::stopSource()
 {
 QMutexLocker locker(&biglock);

View File: corevideosource.h

@@ -26,7 +26,6 @@
 #include "videosource.h"
 #include <QMutex>
-/// A VideoSource that emits frames received by Core
 class CoreVideoSource : public VideoSource
 {
 Q_OBJECT
@@ -36,23 +35,17 @@ public:
 virtual void unsubscribe() override;
 private:
-// Only CoreAV should create a CoreVideoSource since
-// only CoreAV can push images to it
 CoreVideoSource();
-/// Makes a copy of the vpx_image_t and emits it as a new VideoFrame
 void pushFrame(const vpx_image_t *frame);
-/// If true, self-delete after the last suscriber is gone
 void setDeleteOnClose(bool newstate);
-/// Stopping the source will block any pushFrame calls from doing anything
-/// See the callers in CoreAV for the rationale
 void stopSource();
 void restartSource();
 private:
-std::atomic_int subscribers; ///< Number of suscribers
-std::atomic_bool deleteOnClose; ///< If true, self-delete after the last suscriber is gone
+std::atomic_int subscribers;
+std::atomic_bool deleteOnClose;
 QMutex biglock;
 std::atomic_bool stopped;

View File: netcamview.cpp

@@ -35,6 +35,7 @@ NetCamView::NetCamView(int friendId, QWidget* parent)
 : GenericNetCamView(parent)
 , selfFrame{nullptr}
 , friendId{friendId}
+, e(false)
 {
 QString id = FriendList::findFriend(friendId)->getToxId().toString();
 videoSurface = new VideoSurface(Nexus::getProfile()->loadAvatar(id), this);

View File: netcamview.h

@@ -55,7 +55,7 @@ private:
 VideoSurface* selfVideoSurface;
 MovableWidget* selfFrame;
 int friendId;
-bool e = false;
+bool e;
 QVector<QMetaObject::Connection> connections;
 };

View File: videoframe.cpp

@@ -30,6 +30,17 @@ extern "C" {
 #include "videoframe.h"
 #include "camerasource.h"
+/**
+@class VideoFrame
+VideoFrame takes ownership of an AVFrame* and allows fast conversions to other formats
+Ownership of all video frame buffers is kept by the VideoFrame, even after conversion
+All references to the frame data become invalid when the VideoFrame is deleted
+We try to avoid pixel format conversions as much as possible, at the cost of some memory
+All methods are thread-safe. If provided freelistCallback will be called by the destructor,
+unless releaseFrame was called in between.
+*/
 VideoFrame::VideoFrame(AVFrame* frame, int w, int h, int fmt, std::function<void()> freelistCallback)
 : freelistCallback{freelistCallback},
 frameOther{nullptr}, frameYUV420{nullptr}, frameRGB24{nullptr},
@@ -70,6 +81,10 @@ VideoFrame::VideoFrame(AVFrame* frame)
 {
 }
+/**
+@brief VideoFrame constructor. Disable copy.
+@note Use a shared_ptr if you need copies.
+*/
 VideoFrame::~VideoFrame()
 {
 if (freelistCallback)
@@ -78,6 +93,11 @@ VideoFrame::~VideoFrame()
 releaseFrameLockless();
 }
+/**
+@brief Converts the VideoFrame to a QImage that shares our internal video buffer.
+@param size Size of resulting image.
+@return Converted image to RGB24 color model.
+*/
 QImage VideoFrame::toQImage(QSize size)
 {
 if (!convertToRGB24(size))
@@ -88,6 +108,11 @@ QImage VideoFrame::toQImage(QSize size)
 return QImage(*frameRGB24->data, frameRGB24->width, frameRGB24->height, *frameRGB24->linesize, QImage::Format_RGB888);
 }
+/**
+@brief Converts the VideoFrame to a vpx_image_t.
+Converts the VideoFrame to a vpx_image_t that shares our internal video buffer.
+@return Converted image to vpx_image format.
+*/
 vpx_image *VideoFrame::toVpxImage()
 {
 vpx_image* img = vpx_img_alloc(nullptr, VPX_IMG_FMT_I420, width, height, 0);
@@ -240,6 +265,12 @@ bool VideoFrame::convertToYUV420()
 return true;
 }
+/**
+@brief Frees all frame memory.
+Frees all internal buffers and frame data, removes the freelistCallback
+This makes all converted objects that shares our internal buffers invalid.
+*/
 void VideoFrame::releaseFrame()
 {
 QMutexLocker locker(&biglock);
@@ -269,6 +300,10 @@ void VideoFrame::releaseFrameLockless()
 }
 }
+/**
+@brief Return the size of the original frame
+@return The size of the original frame
+*/
 QSize VideoFrame::getSize()
 {
 return {width, height};

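The ownership rules documented above matter for toQImage(): the returned QImage wraps the frame's internal RGB24 buffer, so it must be deep-copied if it is to outlive the frame. A hedged sketch of that pattern (not part of the commit; the include path is an assumption):

#include <QImage>
#include <memory>
#include "videoframe.h"   // assumed include path

QImage snapshot(const std::shared_ptr<VideoFrame>& frame)
{
    QImage shared = frame->toQImage();  // shares the VideoFrame's internal buffer
    if (shared.isNull())
        return QImage();
    // Deep copy so the pixels stay valid after releaseFrame() or frame destruction.
    return shared.copy();
}
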
View File: videoframe.h

@@ -28,12 +28,6 @@ struct AVFrame;
 struct AVCodecContext;
 struct vpx_image;
-/// VideoFrame takes ownership of an AVFrame* and allows fast conversions to other formats
-/// Ownership of all video frame buffers is kept by the VideoFrame, even after conversion
-/// All references to the frame data become invalid when the VideoFrame is deleted
-/// We try to avoid pixel format conversions as much as possible, at the cost of some memory
-/// All methods are thread-safe. If provided freelistCallback will be called by the destructor,
-/// unless releaseFrame was called in between.
 class VideoFrame
 {
 public:
@@ -42,17 +36,11 @@ public:
 VideoFrame(AVFrame* frame, int w, int h, int fmt, std::function<void()> freelistCallback);
 ~VideoFrame();
-/// Return the size of the original frame
 QSize getSize();
-/// Frees all internal buffers and frame data, removes the freelistCallback
-/// This makes all converted objects that shares our internal buffers invalid
 void releaseFrame();
-/// Converts the VideoFrame to a QImage that shares our internal video buffer
 QImage toQImage(QSize size = QSize());
-/// Converts the VideoFrame to a vpx_image_t that shares our internal video buffer
-/// Free it with operator delete, NOT vpx_img_free
 vpx_image* toVpxImage();
 protected:
@@ -61,7 +49,6 @@ protected:
 void releaseFrameLockless();
 private:
-// Disable copy. Use a shared_ptr if you need copies.
 VideoFrame(const VideoFrame& other)=delete;
 VideoFrame& operator=(const VideoFrame& other)=delete;

View File: videosource.h

@@ -25,24 +25,37 @@
 class VideoFrame;
-/// An abstract source of video frames
-/// When it has at least one subscriber the source will emit new video frames
-/// Subscribing is recursive, multiple users can subscribe to the same VideoSource
+/**
+@brief An abstract source of video frames
+When it has at least one subscriber the source will emit new video frames
+Subscribing is recursive, multiple users can subscribe to the same VideoSource
+*/
 class VideoSource : public QObject
 {
 Q_OBJECT
 public:
 virtual ~VideoSource() = default;
-/// If subscribe sucessfully opens the source, it will start emitting frameAvailable signals
+/**
+If subscribe sucessfully opens the source, it will start emitting frameAvailable signals.
+*/
 virtual bool subscribe() = 0;
-/// Stop emitting frameAvailable signals, and free associated resources if necessary
+/**
+Stop emitting frameAvailable signals, and free associated resources if necessary.
+*/
 virtual void unsubscribe() = 0;
 signals:
+/**
+Emitted when new frame available to use.
+@param frame New frame.
+*/
 void frameAvailable(std::shared_ptr<VideoFrame> frame);
-/// Emitted when the source is stopped for an indefinite amount of time,
-/// but might restart sending frames again later
+/**
+Emitted when the source is stopped for an indefinite amount of time,
+but might restart sending frames again later
+*/
 void sourceStopped();
 };

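To make the VideoSource contract above concrete, the smallest possible implementation might look like this sketch (illustration only; NullVideoSource is not part of the commit, and a real source would emit frameAvailable from a capture loop):

#include <QObject>
#include <atomic>
#include <memory>
#include "videosource.h"   // assumed include path

class NullVideoSource : public VideoSource
{
    Q_OBJECT
public:
    virtual bool subscribe() override
    {
        // Subscribing is recursive: every caller just increases the count.
        ++subscribers;
        return true;
    }

    virtual void unsubscribe() override
    {
        // Only the last unsubscribe actually stops the (here imaginary) stream.
        if (--subscribers == 0)
            emit sourceStopped();
    }

private:
    std::atomic_int subscribers{0};
};
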
View File: videosurface.cpp

@@ -30,6 +30,11 @@
 #include <QLabel>
 #include <QDebug>
+/**
+@var std::atomic_bool VideoSurface::frameLock
+@brief Fast lock for lastFrame.
+*/
 float getSizeRatio(const QSize size)
 {
 return size.width() / static_cast<float>(size.height());
@@ -63,6 +68,13 @@ bool VideoSurface::isExpanding() const
 return expanding;
 }
+/**
+@brief Update source.
+@note nullptr is a valid option.
+@param src source to set.
+Unsubscribe from old source and subscribe to new.
+*/
 void VideoSurface::setSource(VideoSource *src)
 {
 if (source == src)

View File: videosurface.h

@@ -35,7 +35,7 @@ public:
 ~VideoSurface();
 bool isExpanding() const;
-void setSource(VideoSource* src); //NULL is a valid option
+void setSource(VideoSource* src);
 QRect getBoundingRect() const;
 float getRatio() const;
 void setAvatar(const QPixmap& pixmap);
@@ -65,7 +65,7 @@ private:
 QRect boundingRect;
 VideoSource* source;
 std::shared_ptr<VideoFrame> lastFrame;
-std::atomic_bool frameLock; ///< Fast lock for lastFrame
+std::atomic_bool frameLock;
 uint8_t hasSubscribed;
 QPixmap avatar;
 float ratio;