#include "camera.h"
#include <QVideoSurfaceFormat>
#include <QMessageBox>
#include <QVideoEncoderSettings>
#include <QVideoEncoderSettingsControl>
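
// Fixed-point YCbCr -> RGB conversion, with coefficients scaled by 1024 (hence the >>10),
// approximating the full-range ITU-R BT.601 transform; results are clamped to [0, 255].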
static inline void fromYCbCrToRGB(
        uint8_t Y, uint8_t Cb, uint8_t Cr,
        uint8_t& R, uint8_t& G, uint8_t& B)
{
    int r = Y + ((1436 * (Cr - 128)) >> 10),
        g = Y - ((354 * (Cb - 128) + 732 * (Cr - 128)) >> 10),
        b = Y + ((1814 * (Cb - 128)) >> 10);

    if(r < 0) {
        r = 0;
    } else if(r > 255) {
        r = 255;
    }

    if(g < 0) {
        g = 0;
    } else if(g > 255) {
        g = 255;
    }

    if(b < 0) {
        b = 0;
    } else if(b > 255) {
        b = 255;
    }

    R = static_cast<uint8_t>(r);
    G = static_cast<uint8_t>(g);
    B = static_cast<uint8_t>(b);
}

Camera::Camera()
    : refcount{0}, camera{new QCamera}
{
    camera->setCaptureMode(QCamera::CaptureVideo);
    camera->setViewfinder(this);

    /* CRASHES ON WINDOWS !
    QMediaService *m = camera->service();
    QVideoEncoderSettingsControl *enc = m->requestControl<QVideoEncoderSettingsControl*>();
    QVideoEncoderSettings sets = enc->videoSettings();
    sets.setResolution(640, 480);
    enc->setVideoSettings(sets);
    */

    connect(camera, SIGNAL(error(QCamera::Error)), this, SLOT(onCameraError(QCamera::Error)));

    supportedFormats << QVideoFrame::Format_YUV420P << QVideoFrame::Format_YV12 << QVideoFrame::Format_RGB32;
}
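
// suscribe()/unsuscribe() reference-count the users of the camera:
// the device is only started for the first subscriber and stopped when the last one leaves.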
void Camera::suscribe()
{
    if (refcount <= 0)
    {
        refcount = 1;
        camera->start();
    }
    else
        refcount++;
}

void Camera::unsuscribe()
{
    refcount--;

    if (refcount <= 0)
    {
        camera->stop();
        refcount = 0;
    }
}

QVideoFrame Camera::getLastFrame()
{
    return lastFrame;
}

bool Camera::start(const QVideoSurfaceFormat &format)
{
    if(supportedFormats.contains(format.pixelFormat()))
    {
        frameFormat = format.pixelFormat();
        QAbstractVideoSurface::start(format);
        return true;
    }
    else
    {
        QMessageBox::warning(0, "Camera error", "The camera only supports uncommon video formats, so it can't be used");
        return false;
    }
}
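
// present() is called by Qt for every new viewfinder frame; the frame is deep-copied
// so that lastFrame stays valid after the source buffer is recycled by the backend.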
bool Camera::present(const QVideoFrame &frame)
{
    QVideoFrame frameMap(frame); // QVideoFrame copies are shallow, so this is effectively a const_cast
    if (!frameMap.map(QAbstractVideoBuffer::ReadOnly))
    {
        qWarning() << "Camera::present: Unable to map frame";
        return false;
    }

    int w = frameMap.width(), h = frameMap.height();
    int bpl = frameMap.bytesPerLine(), size = frameMap.mappedBytes();
    QVideoFrame frameCopy(size, QSize(w, h), bpl, frameMap.pixelFormat());
    frameCopy.map(QAbstractVideoBuffer::WriteOnly);
    memcpy(frameCopy.bits(), frameMap.bits(), size);
    frameCopy.unmap();
    lastFrame = frameCopy;
    frameMap.unmap();
    return true;
}
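
// Qt uses this to negotiate the viewfinder's pixel format; only plain memory buffers
// with the formats listed in the constructor are accepted.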
QList<QVideoFrame::PixelFormat> Camera::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const
{
    if (handleType == QAbstractVideoBuffer::NoHandle)
        return supportedFormats;
    else
        return QList<QVideoFrame::PixelFormat>();
}

void Camera::onCameraError(QCamera::Error value)
{
    QMessageBox::warning(0, "Camera error", QString("Error %1 : %2")
                         .arg(value).arg(camera->errorString()));
}

bool Camera::isFormatSupported(const QVideoSurfaceFormat& format) const
{
    if (format.pixelFormat() == 0)
    {
        //QMessageBox::warning(0, "Camera error", "The camera's video format is not supported!");
        return QAbstractVideoSurface::isFormatSupported(format);
    }
    else if(supportedFormats.contains(format.pixelFormat()))
    {
        return true;
    }
    else
    {
        QMessageBox::warning(0, tr("Camera error"),
                             tr("Camera format %1 not supported, can't use the camera")
                             .arg(format.pixelFormat()));
        return false;
    }
}
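
// Convert the last frame to a QImage. Planar YUV frames store a full-resolution Y plane
// (h rows of bpl bytes) followed by two half-resolution chroma planes with a stride of bpl/2.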
QImage Camera::getLastImage()
{
    if (!lastFrame.map(QAbstractVideoBuffer::ReadOnly))
    {
        qWarning() << "Camera::getLastImage: Error mapping last frame";
        return QImage();
    }

    int w = lastFrame.width(), h = lastFrame.height();
    int bpl = lastFrame.bytesPerLine(), cxbpl = bpl/2;
    QImage img(w, h, QImage::Format_RGB32);

    if (frameFormat == QVideoFrame::Format_YUV420P)
    {
        uint8_t* yData = lastFrame.bits();
        uint8_t* uData = yData + (bpl * h);
        uint8_t* vData = uData + (bpl * h / 4);
        for (int i = 0; i < h; i++)
        {
            uint32_t* scanline = (uint32_t*)img.scanLine(i);
            // bpl is the Y-plane stride and may be larger than w, only convert the w visible pixels
            for (int j = 0; j < w; j++)
            {
                uint8_t Y = yData[i*bpl + j];
                uint8_t U = uData[i/2*cxbpl + j/2];
                uint8_t V = vData[i/2*cxbpl + j/2];

                uint8_t R, G, B;
                fromYCbCrToRGB(Y, U, V, R, G, B);

                scanline[j] = (0xFFu<<24) | (R<<16) | (G<<8) | B;
            }
        }
    }
    else if (frameFormat == QVideoFrame::Format_YV12)
    {
        // Same as YUV420P, but the V plane comes before the U plane
        uint8_t* yData = lastFrame.bits();
        uint8_t* vData = yData + (bpl * h);
        uint8_t* uData = vData + (bpl * h / 4);
        for (int i = 0; i < h; i++)
        {
            uint32_t* scanline = (uint32_t*)img.scanLine(i);
            for (int j = 0; j < w; j++)
            {
                uint8_t Y = yData[i*bpl + j];
                uint8_t U = uData[i/2*cxbpl + j/2];
                uint8_t V = vData[i/2*cxbpl + j/2];

                uint8_t R, G, B;
                fromYCbCrToRGB(Y, U, V, R, G, B);

                scanline[j] = (0xFFu<<24) | (R<<16) | (G<<8) | B;
            }
        }
    }
    else if (frameFormat == QVideoFrame::Format_RGB32)
    {
        // Copy row by row, the frame's stride may be larger than the QImage's
        for (int i = 0; i < h; i++)
            memcpy(img.scanLine(i), lastFrame.bits() + i*bpl, w*4);
    }

    lastFrame.unmap();
    return img;
}
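
// Convert the last frame into an I420 vpx_image; planar YUV sources are copied plane by plane,
// RGB32 is not handled yet.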
vpx_image Camera::getLastVPXImage()
{
    vpx_image img;
    img.w = img.h = 0; // Mark as invalid until the planes are actually filled
    if (!lastFrame.isValid())
        return img;
    if (!lastFrame.map(QAbstractVideoBuffer::ReadOnly))
    {
        qWarning() << "Camera::getLastVPXImage: Error mapping last frame";
        return img;
    }
    int w = lastFrame.width(), h = lastFrame.height();
    int bpl = lastFrame.bytesPerLine();
    vpx_img_alloc(&img, VPX_IMG_FMT_I420, w, h, 1); // I420 == YUV420P, same as YV12 with U and V switched

    if (frameFormat == QVideoFrame::Format_YUV420P)
    {
        uint8_t* yData = lastFrame.bits();
        uint8_t* uData = yData + (bpl * h);
        uint8_t* vData = uData + (bpl * h / 4);
        // Copy each plane into the buffers allocated by vpx_img_alloc,
        // so the image stays valid after the frame is unmapped
        for (int i = 0; i < h; i++)
            memcpy(img.planes[VPX_PLANE_Y] + i*img.stride[VPX_PLANE_Y], yData + i*bpl, w);
        for (int i = 0; i < h/2; i++)
        {
            memcpy(img.planes[VPX_PLANE_U] + i*img.stride[VPX_PLANE_U], uData + i*(bpl/2), w/2);
            memcpy(img.planes[VPX_PLANE_V] + i*img.stride[VPX_PLANE_V], vData + i*(bpl/2), w/2);
        }
    }
    else if (frameFormat == QVideoFrame::Format_YV12)
    {
        // Same layout as YUV420P, but the V plane comes before the U plane
        uint8_t* yData = lastFrame.bits();
        uint8_t* vData = yData + (bpl * h);
        uint8_t* uData = vData + (bpl * h / 4);
        for (int i = 0; i < h; i++)
            memcpy(img.planes[VPX_PLANE_Y] + i*img.stride[VPX_PLANE_Y], yData + i*bpl, w);
        for (int i = 0; i < h/2; i++)
        {
            memcpy(img.planes[VPX_PLANE_U] + i*img.stride[VPX_PLANE_U], uData + i*(bpl/2), w/2);
            memcpy(img.planes[VPX_PLANE_V] + i*img.stride[VPX_PLANE_V], vData + i*(bpl/2), w/2);
        }
    }
    else if (frameFormat == QVideoFrame::Format_RGB32 || frameFormat == QVideoFrame::Format_ARGB32)
    {
        img.w = img.h = 0; // Invalid frame. TODO: Implement conversion
        qWarning() << "Camera: Can't convert from RGB32! Go complain at github.com/tux3/toxgui";
    }

    lastFrame.unmap();
    return img;
}