
docs(audio, video): Change comment style

This commit is contained in:
Diadlo 2016-07-27 01:18:57 +03:00
parent 3edd84c4d8
commit 29443040fb
No known key found for this signature in database
GPG Key ID: 5AF9F2E29107C727
15 changed files with 357 additions and 128 deletions

View File

@ -33,8 +33,6 @@
#include <cassert>
/**
@internal
@class Audio::Private
@brief Encapsulates private audio framework from public qTox Audio API.
@ -88,7 +86,28 @@ private:
};
/**
Returns the singleton instance.
@class Audio
@fn void Audio::frameAvailable(const int16_t *pcm, size_t sample_count, uint8_t channels, uint32_t sampling_rate);
When there are input subscribers, we regularly emit captured audio frames with this signal
Always connect with a blocking queued connection or a lambda, else the behaviour is undefined
@var Audio::AUDIO_SAMPLE_RATE
@brief The next best Opus would take is 24k
@var Audio::AUDIO_FRAME_DURATION
@brief In milliseconds
@var Audio::AUDIO_FRAME_SAMPLE_COUNT
@brief Frame sample count
@var Audio::AUDIO_CHANNELS
@brief Ideally, we'd auto-detect, but that's a sane default
*/
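For illustration only (not part of this commit), a minimal sketch of a consumer honouring that connection requirement; `recorder` and `writePcm` are hypothetical names:

// Hypothetical subscriber: the Qt::BlockingQueuedConnection argument is the point here.
QObject::connect(&Audio::getInstance(), &Audio::frameAvailable,
                 recorder, // assumed QObject* living in the consuming thread
                 [recorder](const int16_t* pcm, size_t samples, uint8_t channels, uint32_t rate)
                 {
                     recorder->writePcm(pcm, samples, channels, rate); // assumed helper
                 },
                 Qt::BlockingQueuedConnection); // capture thread blocks until the lambda returns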
/**
@brief Returns the singleton instance.
*/
Audio& Audio::getInstance()
{
@ -150,7 +169,7 @@ void Audio::checkAlcError(ALCdevice *device) noexcept
}
/**
Returns the current output volume (between 0 and 1)
@brief Returns the current output volume (between 0 and 1)
*/
qreal Audio::outputVolume() const
{
@ -168,7 +187,7 @@ qreal Audio::outputVolume() const
}
/**
Set the master output volume.
@brief Set the master output volume.
@param[in] volume the master volume (between 0 and 1)
*/
@ -238,7 +257,7 @@ qreal Audio::inputGain() const
}
/**
Set the input gain dB level.
@brief Set the input gain dB level.
*/
void Audio::setInputGain(qreal dB)
{
@ -299,7 +318,7 @@ void Audio::unsubscribeInput()
}
/**
Initialize audio input device, if not initialized.
@brief Initialize audio input device, if not initialized.
@return true, if device was initialized; false otherwise
*/
@ -309,7 +328,7 @@ bool Audio::autoInitInput()
}
/**
Initialize audio output device, if not initialized.
@brief Initialize audio output device, if not initialized.
@return true, if device was initialized; false otherwise
*/
@ -354,9 +373,7 @@ bool Audio::initInput(const QString& deviceName)
}
/**
@internal
Open an audio output device
@brief Open an audio output device
*/
bool Audio::initOutput(const QString& deviceName)
{
@ -409,7 +426,7 @@ bool Audio::initOutput(const QString& deviceName)
}
/**
Play a 44100Hz mono 16bit PCM sound from a file
@brief Play a 44100Hz mono 16bit PCM sound from a file
*/
void Audio::playMono16Sound(const QString& path)
{
@ -419,7 +436,7 @@ void Audio::playMono16Sound(const QString& path)
}
/**
Play a 44100Hz mono 16bit PCM sound
@brief Play a 44100Hz mono 16bit PCM sound
*/
void Audio::playMono16Sound(const QByteArray& data)
{
@ -488,9 +505,7 @@ void Audio::playAudioBuffer(ALuint alSource, const int16_t *data, int samples, u
}
/**
@internal
Close active audio input device.
@brief Close active audio input device.
*/
void Audio::cleanupInput()
{
@ -506,9 +521,7 @@ void Audio::cleanupInput()
}
/**
@internal
Close active audio output device
@brief Close active audio output device
*/
void Audio::cleanupOutput()
{
@ -540,6 +553,9 @@ void Audio::cleanupOutput()
}
}
/**
@brief Called after a mono16 sound stopped playing
*/
void Audio::playMono16SoundCleanup()
{
QMutexLocker locker(&audioLock);
@ -554,6 +570,9 @@ void Audio::playMono16SoundCleanup()
}
}
/**
@brief Called on the captureTimer events to capture audio
*/
void Audio::doCapture()
{
QMutexLocker lock(&audioLock);
@ -583,7 +602,7 @@ void Audio::doCapture()
}
/**
Returns true if the output device is open
@brief Returns true if the output device is open
*/
bool Audio::isOutputReady() const
{

View File

@ -42,13 +42,6 @@
#include <AL/alext.h>
#endif
// Public default audio settings
static constexpr uint32_t AUDIO_SAMPLE_RATE = 48000; ///< The next best Opus would take is 24k
static constexpr uint32_t AUDIO_FRAME_DURATION = 20; ///< In milliseconds
static constexpr ALint AUDIO_FRAME_SAMPLE_COUNT = AUDIO_FRAME_DURATION * AUDIO_SAMPLE_RATE/1000;
static constexpr uint32_t AUDIO_CHANNELS = 2; ///< Ideally, we'd auto-detect, but that's a sane default
class Audio : public QObject
{
Q_OBJECT
@ -92,10 +85,15 @@ public:
void playAudioBuffer(ALuint alSource, const int16_t *data, int samples,
unsigned channels, int sampleRate);
public:
// Public default audio settings
static constexpr uint32_t AUDIO_SAMPLE_RATE = 48000;
static constexpr uint32_t AUDIO_FRAME_DURATION = 20;
static constexpr ALint AUDIO_FRAME_SAMPLE_COUNT = AUDIO_FRAME_DURATION * AUDIO_SAMPLE_RATE/1000;
static constexpr uint32_t AUDIO_CHANNELS = 2;
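A quick sanity check of those defaults (editor's illustration, not part of the diff): 20 ms frames at 48 kHz give 20 * 48000 / 1000 = 960 samples per channel per frame, so a stereo frame carries 960 * 2 = 1920 int16_t values in total.

static_assert(AUDIO_FRAME_SAMPLE_COUNT == 960,
              "20 ms * 48000 Hz / 1000 = 960 samples per channel per frame");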
signals:
void groupAudioPlayed(int group, int peer, unsigned short volume);
/// When there are input subscribers, we regularly emit captured audio frames with this signal
/// Always connect with a blocking queued connection or a lambda, or the behavior is undefined
void frameAvailable(const int16_t *pcm, size_t sample_count, uint8_t channels, uint32_t sampling_rate);
private:
@ -111,12 +109,9 @@ private:
bool initOutput(const QString& outDevDescr);
void cleanupInput();
void cleanupOutput();
/// Called after a mono16 sound stopped playing
void playMono16SoundCleanup();
/// Called on the captureTimer events to capture audio
void doCapture();
private:
Private* d;

View File

@ -38,6 +38,28 @@ extern "C" {
#include "src/platform/camera/avfoundation.h"
#endif
/**
@class CameraDevice
Maintains an FFmpeg context for open camera devices,
takes care of sharing the context across users and closing
the camera device when not in use. The device can be opened
recursively, and must then be closed recursively
*/
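A hedged usage sketch of that recursive open/close contract (the device name is a placeholder, error paths elided):

CameraDevice* dev = CameraDevice::open("/dev/video0", VideoMode()); // hypothetical name; nullptr on failure
if (dev)
{
    dev->open();        // recursive open: only bumps the refcount, never fails
    dev->close();       // balances the recursive open, device stays alive
    if (dev->close())   // balances the first open; true means the last reference was closed
        dev = nullptr;  // "this" (and dev) are invalid from here on
}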
/**
@var const QString CameraDevice::devName
@brief Short name of the device
@var AVFormatContext* CameraDevice::context
@brief Context of the open device, must always be valid
@var std::atomic_int CameraDevice::refcount;
@brief Number of times the device was opened
*/
QHash<QString, CameraDevice*> CameraDevice::openDevices;
QMutex CameraDevice::openDeviceLock, CameraDevice::iformatLock;
AVInputFormat* CameraDevice::iformat{nullptr};
@ -103,6 +125,18 @@ out:
return dev;
}
/**
@brief Opens a device.
Opens a device, creating a new one if needed
If the device is already open in another mode, the mode
will be ignored and the existing device is used
If the mode does not exist, a new device can't be opened.
@param devName Device name to open.
@param mode Mode of device to open.
@return CameraDevice if the device could be opened, nullptr otherwise.
*/
CameraDevice* CameraDevice::open(QString devName, VideoMode mode)
{
if (!getDefaultInputFormat())
@ -205,11 +239,20 @@ CameraDevice* CameraDevice::open(QString devName, VideoMode mode)
return dev;
}
/**
@brief Opens the device again. Never fails
*/
void CameraDevice::open()
{
++refcount;
}
/**
@brief Closes the device. Never fails.
@note If this function returns true, "this" becomes invalid.
@return True if the device was finally deleted (last reference closed),
false otherwise (other references still exist).
*/
bool CameraDevice::close()
{
if (--refcount > 0)
@ -223,6 +266,11 @@ bool CameraDevice::close()
return true;
}
/**
@brief Get raw device list
@note Uses avdevice_list_devices
@return Raw device list
*/
QVector<QPair<QString, QString>> CameraDevice::getRawDeviceListGeneric()
{
QVector<QPair<QString, QString>> devices;
@ -234,11 +282,13 @@ QVector<QPair<QString, QString>> CameraDevice::getRawDeviceListGeneric()
AVFormatContext *s;
if (!(s = avformat_alloc_context()))
return devices;
if (!iformat->priv_class || !AV_IS_INPUT_DEVICE(iformat->priv_class->category))
{
avformat_free_context(s);
return devices;
}
s->iformat = iformat;
if (s->iformat->priv_data_size > 0)
{
@ -280,7 +330,7 @@ QVector<QPair<QString, QString>> CameraDevice::getRawDeviceListGeneric()
// Convert the list to a QVector
devices.resize(devlist->nb_devices);
for (int i=0; i<devlist->nb_devices; i++)
for (int i = 0; i < devlist->nb_devices; i++)
{
AVDeviceInfo* dev = devlist->devices[i];
devices[i].first = dev->device_name;
@ -290,6 +340,11 @@ QVector<QPair<QString, QString>> CameraDevice::getRawDeviceListGeneric()
return devices;
}
/**
@brief Get device list with descriptions
@return A list of device names and descriptions.
The names are the first part of the pair and can be passed to open(QString).
*/
QVector<QPair<QString, QString>> CameraDevice::getDeviceList()
{
QVector<QPair<QString, QString>> devices;
@ -336,6 +391,11 @@ QVector<QPair<QString, QString>> CameraDevice::getDeviceList()
return devices;
}
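For context, a small sketch of how the returned pairs are meant to be used (per the doc above; includes and error handling elided):

// List devices, then open one by its short name (pair.first).
const auto devices = CameraDevice::getDeviceList();
for (const auto& dev : devices)
    qDebug() << dev.first << "-" << dev.second; // short name + human-readable description
if (!devices.isEmpty())
    if (CameraDevice* cam = CameraDevice::open(devices.first().first))
        cam->close();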
/**
@brief Get the default device name.
@return The short name of the default device
This is either the device in the settings or the system default.
*/
QString CameraDevice::getDefaultDeviceName()
{
QString defaultdev = Settings::getInstance().getVideoDev();
@ -354,11 +414,20 @@ QString CameraDevice::getDefaultDeviceName()
return devlist[0].first;
}
/**
@brief Checks if a device name specifies a display.
@param devName Device name to check.
@return True, if device is screen, false otherwise.
*/
bool CameraDevice::isScreen(const QString &devName)
{
return devName.startsWith("x11grab") || devName.startsWith("gdigrab");
}
/**
@brief Get list of resolutions and position of screens
@return Vector of available screen modes with offset
*/
QVector<VideoMode> CameraDevice::getScreenModes()
{
QList<QScreen*> screens = QApplication::screens();
@ -376,6 +445,11 @@ QVector<VideoMode> CameraDevice::getScreenModes()
return result;
}
/**
@brief Get the list of video modes for a device.
@param devName Device name to get modes from.
@return Vector of available modes for the device.
*/
QVector<VideoMode> CameraDevice::getVideoModes(QString devName)
{
Q_UNUSED(devName);
@ -401,6 +475,11 @@ QVector<VideoMode> CameraDevice::getVideoModes(QString devName)
return {};
}
/**
@brief Get the name of the pixel format of a video mode.
@param pixel_format Pixel format to get the name from.
@return Name of the pixel format.
*/
QString CameraDevice::getPixelFormatString(uint32_t pixel_format)
{
#ifdef Q_OS_LINUX
@ -410,6 +489,13 @@ QString CameraDevice::getPixelFormatString(uint32_t pixel_format)
#endif
}
/**
@brief Compare two pixel formats.
@param a First pixel format to compare.
@param b Second pixel format to compare.
@return True if we prefer format a to b,
false otherwise (such as if there's no preference).
*/
bool CameraDevice::betterPixelFormat(uint32_t a, uint32_t b)
{
#ifdef Q_OS_LINUX
@ -419,6 +505,10 @@ bool CameraDevice::betterPixelFormat(uint32_t a, uint32_t b)
#endif
}
/**
@brief Sets CameraDevice::iformat to default.
@return True if success, false if failure.
*/
bool CameraDevice::getDefaultInputFormat()
{
QMutexLocker locker(&iformatLock);

View File

@ -33,55 +33,36 @@ struct AVInputFormat;
struct AVDeviceInfoList;
struct AVDictionary;
/// Maintains an FFmpeg context for open camera devices,
/// takes care of sharing the context across users
/// and closing the camera device when not in use.
/// The device can be opened recursively,
/// and must then be closed recursively
class CameraDevice
{
public:
/// Opens a device, creating a new one if needed
/// If the device is already open in another mode, the mode
/// will be ignored and the existing device is used
/// If the mode does not exist, a new device can't be opened
/// Returns a nullptr if the device couldn't be opened
static CameraDevice* open(QString devName, VideoMode mode = VideoMode());
void open(); ///< Opens the device again. Never fails
bool close(); ///< Closes the device. Never fails. If returns true, "this" becomes invalid
void open();
bool close();
/// Returns a list of device names and descriptions
/// The names are the first part of the pair and can be passed to open(QString)
static QVector<QPair<QString, QString>> getDeviceList();
/// Get the list of video modes for a device
static QVector<VideoMode> getVideoModes(QString devName);
/// Get the name of the pixel format of a video mode
static QString getPixelFormatString(uint32_t pixel_format);
/// Returns true if we prefer format a to b, false otherwise (such as if there's no preference)
static bool betterPixelFormat(uint32_t a, uint32_t b);
/// Returns the short name of the default device
/// This is either the device in the settings
/// or the system default.
static QString getDefaultDeviceName();
/// Checks if a device name specifies a display
static bool isScreen(const QString &devName);
private:
CameraDevice(const QString &devName, AVFormatContext *context);
static CameraDevice* open(QString devName, AVDictionary** options);
static bool getDefaultInputFormat(); ///< Sets CameraDevice::iformat, returns success/failure
static QVector<QPair<QString, QString> > getRawDeviceListGeneric(); ///< Uses avdevice_list_devices
static QVector<VideoMode> getScreenModes(); ///< Returns available screen modes with offset
static bool getDefaultInputFormat();
static QVector<QPair<QString, QString> > getRawDeviceListGeneric();
static QVector<VideoMode> getScreenModes();
public:
const QString devName; ///< Short name of the device
AVFormatContext* context; ///< Context of the open device, must always be valid
const QString devName;
AVFormatContext* context;
private:
std::atomic_int refcount; ///< Number of times the device was opened
std::atomic_int refcount;
static QHash<QString, CameraDevice*> openDevices;
static QMutex openDeviceLock, iformatLock;
static AVInputFormat* iformat, *idesktopFormat;

View File

@ -33,6 +33,57 @@ extern "C" {
#include "cameradevice.h"
#include "videoframe.h"
/**
@class CameraSource
@brief This class is a wrapper to share a camera's captured video frames
It allows objects to subscribe and unsubscribe to the stream, starting
the camera and streaming new video frames only when needed.
This is a singleton, since we can only capture from one
camera at the same time without thread-safety issues.
The source is lazy in the sense that it will only keep the video
device open as long as there are subscribers, the source can be
open but the device closed if there are zero subscribers.
*/
/**
@var QVector<std::weak_ptr<VideoFrame>> CameraSource::freelist
@brief Frames that need freeing before we can safely close the device
@var QFuture<void> CameraSource::streamFuture
@brief Future of the streaming thread
@var QString CameraSource::deviceName
@brief Short name of the device for CameraDevice's open(QString)
@var CameraDevice* CameraSource::device
@brief Non-owning pointer to an open CameraDevice, or nullptr. Not atomic, synced with memfences when it becomes null.
@var VideoMode CameraSource::mode
@brief What mode we tried to open the device in, all zeros means default mode
@var AVCodecContext* CameraSource::cctx
@brief Codec context of the camera's selected video stream
@var AVCodecContext* CameraSource::cctxOrig
@brief Codec context of the camera's selected video stream
@var int CameraSource::videoStreamIndex
@brief A camera can have multiple streams, this is the one we're decoding
@var QMutex CameraSource::biglock
@brief True when locked. Faster than mutexes for video decoding.
@var QMutex CameraSource::freelistLock
@brief True when locked. Faster than mutexes for video decoding.
@var std::atomic_bool CameraSource::streamBlocker
@brief Holds the streaming thread still when true
@var std::atomic_int CameraSource::subscriptions
@brief Remember how many times we subscribed for RAII
*/
CameraSource* CameraSource::instance{nullptr};
CameraSource::CameraSource()
@ -45,6 +96,9 @@ CameraSource::CameraSource()
avdevice_register_all();
}
/**
@brief Returns the singleton instance.
*/
CameraSource& CameraSource::getInstance()
{
if (!instance)
@ -61,6 +115,12 @@ void CameraSource::destroyInstance()
}
}
/**
@brief Opens the source for the camera device.
@note If a device is already open, the source will seamlessly switch to the new device.
Opens the source for the camera device given in argument, the one in the settings, or the system default.
*/
void CameraSource::open()
{
open(CameraDevice::getDefaultDeviceName());
@ -103,6 +163,11 @@ void CameraSource::open(const QString& DeviceName, VideoMode Mode)
streamBlocker = false;
}
/**
@brief Stops streaming.
Equivalent to opening the source with the video device "none".
*/
void CameraSource::close()
{
open("none");
@ -214,6 +279,11 @@ void CameraSource::unsubscribe()
subscriptions--;
}
/**
@brief Opens the video device and starts streaming.
@note Callers must own the biglock.
@return True if success, false otherwise.
*/
bool CameraSource::openDevice()
{
qDebug() << "Opening device " << deviceName;
@ -296,6 +366,10 @@ bool CameraSource::openDevice()
return true;
}
/**
@brief Closes the video device and stops streaming.
@note Callers must own the biglock.
*/
void CameraSource::closeDevice()
{
qDebug() << "Closing device "<<deviceName;
@ -324,6 +398,10 @@ void CameraSource::closeDevice()
std::atomic_thread_fence(std::memory_order_release);
}
/**
@brief Blocking. Decodes video stream and emits new frames.
@note Designed to run in its own thread.
*/
void CameraSource::stream()
{
auto streamLoop = [=]()
@ -384,12 +462,28 @@ void CameraSource::stream()
}
}
/**
@brief Callback invoked by VideoFrames when they are freed.
@param freelistIndex Index of the frame in the freelist.
All VideoFrames must be deleted or released before we can close the device
or the device will forcibly free them, and then ~VideoFrame() will double free.
In theory very careful coding from our users could ensure all VideoFrames
die before unsubscribing, even the ones currently in flight in the metatype system.
But that's just asking for trouble and mysterious crashes, so we'll just
maintain a freelist and have all VideoFrames tell us when they die so we can forget them.
*/
void CameraSource::freelistCallback(int freelistIndex)
{
QMutexLocker l{&freelistLock};
freelist[freelistIndex].reset();
}
/**
@brief Get the index of a free slot in the freelist.
@note Callers must hold the freelistLock.
@return Index of a free slot.
*/
int CameraSource::getFreelistSlotLockless()
{
int size = freelist.size();

View File

@ -31,17 +31,6 @@
class CameraDevice;
struct AVCodecContext;
/**
* This class is a wrapper to share a camera's captured video frames
* It allows objects to subscribe and unsubscribe to the stream, starting
* the camera and streaming new video frames only when needed.
* This is a singleton, since we can only capture from one
* camera at the same time without thread-safety issues.
* The source is lazy in the sense that it will only keep the video
* device open as long as there are subscribers, the source can be
* open but the device closed if there are zero subscribers.
**/
class CameraSource : public VideoSource
{
Q_OBJECT
@ -49,12 +38,10 @@ class CameraSource : public VideoSource
public:
static CameraSource& getInstance();
static void destroyInstance();
/// Opens the source for the camera device in argument, in the settings, or the system default
/// If a device is already open, the source will seamlessly switch to the new device
void open();
void open(const QString& deviceName);
void open(const QString& deviceName, VideoMode mode);
void close(); ///< Equivalent to opening the source with the video device "none". Stops streaming.
void close();
bool isOpen();
// VideoSource interface
@ -67,34 +54,24 @@ signals:
private:
CameraSource();
~CameraSource();
/// Blocking. Decodes video stream and emits new frames.
/// Designed to run in its own thread.
void stream();
/// All VideoFrames must be deleted or released before we can close the device
/// or the device will forcibly free them, and then ~VideoFrame() will double free.
/// In theory very careful coding from our users could ensure all VideoFrames
/// die before unsubscribing, even the ones currently in flight in the metatype system.
/// But that's just asking for trouble and mysterious crashes, so we'll just
/// maintain a freelist and have all VideoFrames tell us when they die so we can forget them.
void freelistCallback(int freelistIndex);
/// Get the index of a free slot in the freelist
/// Callers must hold the freelistLock
int getFreelistSlotLockless();
bool openDevice(); ///< Callers must own the biglock. Actually opens the video device and starts streaming.
void closeDevice(); ///< Callers must own the biglock. Actually closes the video device and stops streaming.
bool openDevice();
void closeDevice();
private:
QVector<std::weak_ptr<VideoFrame>> freelist; ///< Frames that need freeing before we can safely close the device
QFuture<void> streamFuture; ///< Future of the streaming thread
QString deviceName; ///< Short name of the device for CameraDevice's open(QString)
CameraDevice* device; ///< Non-owning pointer to an open CameraDevice, or nullptr. Not atomic, synced with memfences when becomes null.
VideoMode mode; ///< What mode we tried to open the device in, all zeros means default mode
AVCodecContext* cctx, *cctxOrig; ///< Codec context of the camera's selected video stream
int videoStreamIndex; ///< A camera can have multiple streams, this is the one we're decoding
QMutex biglock, freelistLock; ///< True when locked. Faster than mutexes for video decoding.
QVector<std::weak_ptr<VideoFrame>> freelist;
QFuture<void> streamFuture;
QString deviceName;
CameraDevice* device;
VideoMode mode;
AVCodecContext* cctx, *cctxOrig;
int videoStreamIndex;
QMutex biglock, freelistLock;
std::atomic_bool _isOpen;
std::atomic_bool streamBlocker; ///< Holds the streaming thread still when true
std::atomic_int subscriptions; ///< Remember how many times we subscribed for RAII
std::atomic_bool streamBlocker;
std::atomic_int subscriptions;
static CameraSource* instance;
};

View File

@ -25,12 +25,34 @@ extern "C" {
#include "corevideosource.h"
#include "videoframe.h"
/**
@class CoreVideoSource
@brief A VideoSource that emits frames received by Core.
*/
/**
@var std::atomic_int subscribers
@brief Number of subscribers
@var std::atomic_bool deleteOnClose
@brief If true, self-delete after the last subscriber is gone
*/
/**
@brief CoreVideoSource constructor.
@note Only CoreAV should create a CoreVideoSource since
only CoreAV can push images to it.
*/
CoreVideoSource::CoreVideoSource()
: subscribers{0}, deleteOnClose{false},
stopped{false}
{
}
/**
@brief Makes a copy of the vpx_image_t and emits it as a new VideoFrame.
@param vpxframe Frame to copy.
*/
void CoreVideoSource::pushFrame(const vpx_image_t* vpxframe)
{
if (stopped)
@ -108,12 +130,22 @@ void CoreVideoSource::unsubscribe()
biglock.unlock();
}
/**
@brief Set up delete on close.
@param newstate If true, self-delete after the last subscriber is gone.
*/
void CoreVideoSource::setDeleteOnClose(bool newstate)
{
QMutexLocker locker(&biglock);
deleteOnClose = newstate;
}
/**
@brief Stops the source.
@see The callers in CoreAV for the rationale.
Stopping the source will block any pushFrame calls from doing anything.
*/
void CoreVideoSource::stopSource()
{
QMutexLocker locker(&biglock);

View File

@ -26,7 +26,6 @@
#include "videosource.h"
#include <QMutex>
/// A VideoSource that emits frames received by Core
class CoreVideoSource : public VideoSource
{
Q_OBJECT
@ -36,23 +35,17 @@ public:
virtual void unsubscribe() override;
private:
// Only CoreAV should create a CoreVideoSource since
// only CoreAV can push images to it
CoreVideoSource();
/// Makes a copy of the vpx_image_t and emits it as a new VideoFrame
void pushFrame(const vpx_image_t *frame);
/// If true, self-delete after the last subscriber is gone
void setDeleteOnClose(bool newstate);
/// Stopping the source will block any pushFrame calls from doing anything
/// See the callers in CoreAV for the rationale
void stopSource();
void restartSource();
private:
std::atomic_int subscribers; ///< Number of subscribers
std::atomic_bool deleteOnClose; ///< If true, self-delete after the last subscriber is gone
std::atomic_int subscribers;
std::atomic_bool deleteOnClose;
QMutex biglock;
std::atomic_bool stopped;

View File

@ -35,6 +35,7 @@ NetCamView::NetCamView(int friendId, QWidget* parent)
: GenericNetCamView(parent)
, selfFrame{nullptr}
, friendId{friendId}
, e(false)
{
QString id = FriendList::findFriend(friendId)->getToxId().toString();
videoSurface = new VideoSurface(Nexus::getProfile()->loadAvatar(id), this);

View File

@ -55,7 +55,7 @@ private:
VideoSurface* selfVideoSurface;
MovableWidget* selfFrame;
int friendId;
bool e = false;
bool e;
QVector<QMetaObject::Connection> connections;
};

View File

@ -30,6 +30,17 @@ extern "C" {
#include "videoframe.h"
#include "camerasource.h"
/**
@class VideoFrame
VideoFrame takes ownership of an AVFrame* and allows fast conversions to other formats
Ownership of all video frame buffers is kept by the VideoFrame, even after conversion
All references to the frame data become invalid when the VideoFrame is deleted
We try to avoid pixel format conversions as much as possible, at the cost of some memory
All methods are thread-safe. If provided, freelistCallback will be called by the destructor,
unless releaseFrame was called in between.
*/
VideoFrame::VideoFrame(AVFrame* frame, int w, int h, int fmt, std::function<void()> freelistCallback)
: freelistCallback{freelistCallback},
frameOther{nullptr}, frameYUV420{nullptr}, frameRGB24{nullptr},
@ -70,6 +81,10 @@ VideoFrame::VideoFrame(AVFrame* frame)
{
}
/**
@brief VideoFrame constructor. Disable copy.
@note Use a shared_ptr if you need copies.
*/
VideoFrame::~VideoFrame()
{
if (freelistCallback)
@ -78,6 +93,11 @@ VideoFrame::~VideoFrame()
releaseFrameLockless();
}
/**
@brief Converts the VideoFrame to a QImage that shares our internal video buffer.
@param size Size of resulting image.
@return Converted image in the RGB24 color model.
*/
QImage VideoFrame::toQImage(QSize size)
{
if (!convertToRGB24(size))
@ -88,6 +108,11 @@ QImage VideoFrame::toQImage(QSize size)
return QImage(*frameRGB24->data, frameRGB24->width, frameRGB24->height, *frameRGB24->linesize, QImage::Format_RGB888);
}
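Since the QImage shares the frame's internal buffer, the VideoFrame has to outlive every use of that image. A hedged sketch of a safe consumer (onFrame is a hypothetical slot receiving the std::shared_ptr<VideoFrame> delivered by VideoSource::frameAvailable):

void onFrame(std::shared_ptr<VideoFrame> frame) // holding the shared_ptr keeps the buffers alive
{
    QImage img = frame->toQImage();  // shares frame's internal RGB24 buffer, no deep copy
    // ... use img here, while 'frame' is still in scope ...
}   // frame may be released now; img must not be touched afterwards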
/**
@brief Converts the VideoFrame to a vpx_image_t.
Converts the VideoFrame to a vpx_image_t that shares our internal video buffer.
@return Converted image in vpx_image format.
*/
vpx_image *VideoFrame::toVpxImage()
{
vpx_image* img = vpx_img_alloc(nullptr, VPX_IMG_FMT_I420, width, height, 0);
@ -240,6 +265,12 @@ bool VideoFrame::convertToYUV420()
return true;
}
/**
@brief Frees all frame memory.
Frees all internal buffers and frame data, removes the freelistCallback
This makes all converted objects that share our internal buffers invalid.
*/
void VideoFrame::releaseFrame()
{
QMutexLocker locker(&biglock);
@ -269,6 +300,10 @@ void VideoFrame::releaseFrameLockless()
}
}
/**
@brief Return the size of the original frame
@return The size of the original frame
*/
QSize VideoFrame::getSize()
{
return {width, height};

View File

@ -28,12 +28,6 @@ struct AVFrame;
struct AVCodecContext;
struct vpx_image;
/// VideoFrame takes ownership of an AVFrame* and allows fast conversions to other formats
/// Ownership of all video frame buffers is kept by the VideoFrame, even after conversion
/// All references to the frame data become invalid when the VideoFrame is deleted
/// We try to avoid pixel format conversions as much as possible, at the cost of some memory
/// All methods are thread-safe. If provided freelistCallback will be called by the destructor,
/// unless releaseFrame was called in between.
class VideoFrame
{
public:
@ -42,17 +36,11 @@ public:
VideoFrame(AVFrame* frame, int w, int h, int fmt, std::function<void()> freelistCallback);
~VideoFrame();
/// Return the size of the original frame
QSize getSize();
/// Frees all internal buffers and frame data, removes the freelistCallback
/// This makes all converted objects that share our internal buffers invalid
void releaseFrame();
/// Converts the VideoFrame to a QImage that shares our internal video buffer
QImage toQImage(QSize size = QSize());
/// Converts the VideoFrame to a vpx_image_t that shares our internal video buffer
/// Free it with operator delete, NOT vpx_img_free
vpx_image* toVpxImage();
protected:
@ -61,7 +49,6 @@ protected:
void releaseFrameLockless();
private:
// Disable copy. Use a shared_ptr if you need copies.
VideoFrame(const VideoFrame& other)=delete;
VideoFrame& operator=(const VideoFrame& other)=delete;

View File

@ -25,24 +25,37 @@
class VideoFrame;
/// An abstract source of video frames
/// When it has at least one subscriber the source will emit new video frames
/// Subscribing is recursive, multiple users can subscribe to the same VideoSource
/**
@brief An abstract source of video frames
When it has at least one subscriber the source will emit new video frames
Subscribing is recursive, multiple users can subscribe to the same VideoSource
*/
class VideoSource : public QObject
{
Q_OBJECT
public:
virtual ~VideoSource() = default;
/// If subscribe successfully opens the source, it will start emitting frameAvailable signals
/**
If subscribe successfully opens the source, it will start emitting frameAvailable signals.
*/
virtual bool subscribe() = 0;
/// Stop emitting frameAvailable signals, and free associated resources if necessary
/**
Stop emitting frameAvailable signals, and free associated resources if necessary.
*/
virtual void unsubscribe() = 0;
signals:
/**
Emitted when a new frame is available to use.
@param frame New frame.
*/
void frameAvailable(std::shared_ptr<VideoFrame> frame);
/// Emitted when the source is stopped for an indefinite amount of time,
/// but might restart sending frames again later
/**
Emitted when the source is stopped for an indefinite amount of time,
but might restart sending frames again later
*/
void sourceStopped();
};
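As a hedged sketch of how a consumer drives this interface (viewer and Viewer::onFrame are assumed names, CameraSource stands in for any concrete VideoSource):

VideoSource& source = CameraSource::getInstance();
QObject::connect(&source, &VideoSource::frameAvailable,
                 viewer, &Viewer::onFrame);  // hypothetical receiver slot
if (source.subscribe())                      // source starts emitting frameAvailable
{
    // ... consume frames ...
    source.unsubscribe();                    // stop frames, free resources if we were the last subscriber
}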

View File

@ -30,6 +30,11 @@
#include <QLabel>
#include <QDebug>
/**
@var std::atomic_bool VideoSurface::frameLock
@brief Fast lock for lastFrame.
*/
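The diff does not show how frameLock is used, but an atomic_bool "fast lock" is usually a tiny spinlock; the following is an illustrative pattern only, not necessarily VideoSurface's exact code:

// Illustrative spinlock around lastFrame; the real member functions may differ.
while (frameLock.exchange(true, std::memory_order_acquire))
    ;                                          // busy-wait until we own the lock
std::shared_ptr<VideoFrame> frame = lastFrame; // short critical section
frameLock.store(false, std::memory_order_release);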
float getSizeRatio(const QSize size)
{
return size.width() / static_cast<float>(size.height());
@ -63,6 +68,13 @@ bool VideoSurface::isExpanding() const
return expanding;
}
/**
@brief Update source.
@note nullptr is a valid option.
@param src Source to set.
Unsubscribes from the old source and subscribes to the new one.
*/
void VideoSurface::setSource(VideoSource *src)
{
if (source == src)

View File

@ -35,7 +35,7 @@ public:
~VideoSurface();
bool isExpanding() const;
void setSource(VideoSource* src); //NULL is a valid option
void setSource(VideoSource* src);
QRect getBoundingRect() const;
float getRatio() const;
void setAvatar(const QPixmap& pixmap);
@ -65,7 +65,7 @@ private:
QRect boundingRect;
VideoSource* source;
std::shared_ptr<VideoFrame> lastFrame;
std::atomic_bool frameLock; ///< Fast lock for lastFrame
std::atomic_bool frameLock;
uint8_t hasSubscribed;
QPixmap avatar;
float ratio;