
Use a global AL context, not per-call

Tux3 / Mlkj / !Lev.uXFMLA 2014-08-30 14:40:41 +02:00
parent 5b617dd4fb
commit 35731e1f53
3 changed files with 41 additions and 35 deletions
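In short: the OpenAL output device, capture device, and context move from each per-call ToxCall entry into Core as static members, opened once in Core's constructor and released in its destructor; each call now only creates its own AL source against the shared context. A minimal standalone sketch of that lifecycle follows; the AudioBackend name, the 48 kHz sample rate, and the buffer sizing are illustrative assumptions, not qTox's av_DefaultSettings.

// Sketch of the "one global context" pattern (assumes OpenAL Soft headers).
#include <AL/al.h>
#include <AL/alc.h>

struct AudioBackend
{
    ALCdevice*  outDev  = nullptr;  // playback device shared by all calls
    ALCdevice*  inDev   = nullptr;  // capture device shared by all calls
    ALCcontext* context = nullptr;  // single context, made current once

    bool init()
    {
        outDev = alcOpenDevice(nullptr);                  // default output device
        if (!outDev)
            return false;
        context = alcCreateContext(outDev, nullptr);
        if (!context || !alcMakeContextCurrent(context))  // bind once for the app's lifetime
        {
            if (context)
                alcDestroyContext(context);
            alcCloseDevice(outDev);
            context = nullptr;
            outDev  = nullptr;
            return false;
        }
        // 48 kHz, mono, 16-bit capture; buffer holds roughly 100 ms of samples
        inDev = alcCaptureOpenDevice(nullptr, 48000, AL_FORMAT_MONO16, 4800);
        return inDev != nullptr;
    }

    void shutdown()
    {
        if (context)
        {
            alcMakeContextCurrent(nullptr);               // unbind before destroying
            alcDestroyContext(context);
        }
        if (outDev)
            alcCloseDevice(outDev);
        if (inDev)
            alcCaptureCloseDevice(inDev);
    }
};

// Per call, only a source is created against the shared context:
//   ALuint src; alGenSources(1, &src);  /* ... */  alDeleteSources(1, &src);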


@@ -63,6 +63,26 @@ Core::Core(Camera* cam, QThread *coreThread) :
         calls[i].sendVideoTimer->moveToThread(coreThread);
         connect(calls[i].sendVideoTimer, &QTimer::timeout, [this,i](){sendCallVideo(i);});
     }
+    // OpenAL init
+    alOutDev = alcOpenDevice(nullptr);
+    if (!alOutDev)
+    {
+        qWarning() << "Core: Cannot open output audio device";
+    }
+    else
+    {
+        alContext=alcCreateContext(alOutDev,nullptr);
+        if (!alcMakeContextCurrent(alContext))
+        {
+            qWarning() << "Core: Cannot create output audio context";
+            alcCloseDevice(alOutDev);
+        }
+    }
+    alInDev = alcCaptureOpenDevice(NULL,av_DefaultSettings.audio_sample_rate, AL_FORMAT_MONO16,
+            (av_DefaultSettings.audio_frame_duration * av_DefaultSettings.audio_sample_rate * 4) / 1000);
+    if (!alInDev)
+        qWarning() << "Core: Cannot open input audio device";
 }
 
 Core::~Core()
@@ -78,6 +98,16 @@ Core::~Core()
         delete[] videobuf;
         videobuf=nullptr;
     }
+    if (alContext)
+    {
+        alcMakeContextCurrent(nullptr);
+        alcDestroyContext(alContext);
+    }
+    if (alOutDev)
+        alcCloseDevice(alOutDev);
+    if (alInDev)
+        alcCaptureCloseDevice(alInDev);
 }
 
 void Core::start()

core.h

@@ -104,8 +104,6 @@ public:
     bool videoEnabled;
     bool active;
     bool muteMic;
-    ALCdevice* alOutDev, *alInDev;
-    ALCcontext* alContext;
     ALuint alSource;
 };
@@ -303,6 +301,9 @@ private:
     static const int videobufsize;
     static uint8_t* videobuf;
     static int videoBusyness; // Used to know when to drop frames
+    static ALCdevice* alOutDev, *alInDev;
+    static ALCcontext* alContext;
 };
 
 #endif // CORE_HPP


@@ -6,6 +6,9 @@ const int Core::videobufsize{TOXAV_MAX_VIDEO_WIDTH * TOXAV_MAX_VIDEO_HEIGHT * 4}
 uint8_t* Core::videobuf;
 int Core::videoBusyness;
+ALCdevice* Core::alOutDev, *Core::alInDev;
+ALCcontext* Core::alContext;
 
 void Core::prepareCall(int friendId, int callId, ToxAv* toxav, bool videoEnabled)
 {
     qDebug() << QString("Core: preparing call %1").arg(callId);
@@ -20,33 +23,9 @@ void Core::prepareCall(int friendId, int callId, ToxAv* toxav, bool videoEnabled
     calls[callId].videoEnabled = videoEnabled;
     toxav_prepare_transmission(toxav, callId, av_jbufdc, av_VADd, videoEnabled);
-    // Audio output
-    calls[callId].alOutDev = alcOpenDevice(nullptr);
-    if (!calls[callId].alOutDev)
-    {
-        qWarning() << "Coreav: Cannot open output audio device, hanging up call";
-        toxav_hangup(toxav, callId);
-        return;
-    }
-    calls[callId].alContext=alcCreateContext(calls[callId].alOutDev,nullptr);
-    if (!alcMakeContextCurrent(calls[callId].alContext))
-    {
-        qWarning() << "Coreav: Cannot create output audio context, hanging up call";
-        alcCloseDevice(calls[callId].alOutDev);
-        toxav_hangup(toxav, callId);
-        return;
-    }
+    // Audio
     alGenSources(1, &calls[callId].alSource);
-    // Audio Input
-    calls[callId].alInDev = alcCaptureOpenDevice(NULL,av_DefaultSettings.audio_sample_rate, AL_FORMAT_MONO16, (av_DefaultSettings.audio_frame_duration * av_DefaultSettings.audio_sample_rate * 4) / 1000);
-    if (!calls[callId].alInDev)
-    {
-        qWarning() << "Coreav: Cannot open input audio device, hanging up call";
-        toxav_hangup(toxav, callId);
-        return;
-    }
-    alcCaptureStart(calls[callId].alInDev);
+    alcCaptureStart(alInDev);
     // Go
     calls[callId].active = true;
@@ -164,11 +143,7 @@ void Core::cleanupCall(int callId)
     calls[callId].sendVideoTimer->stop();
     if (calls[callId].videoEnabled)
         Widget::getInstance()->getCamera()->unsuscribe();
-    alcMakeContextCurrent(nullptr);
-    alcDestroyContext(calls[callId].alContext);
-    alcCloseDevice(calls[callId].alOutDev);
-    alcCaptureStop(calls[callId].alInDev);
-    alcCaptureCloseDevice(calls[callId].alInDev);
+    alcCaptureStop(alInDev);
 }
 
 void Core::playCallAudio(ToxAv* toxav, int32_t callId, int16_t *data, int samples, void *user_data)
@@ -199,10 +174,10 @@ void Core::sendCallAudio(int callId, ToxAv* toxav)
     bool frame = false;
     ALint samples;
-    alcGetIntegerv(calls[callId].alInDev, ALC_CAPTURE_SAMPLES, sizeof(samples), &samples);
+    alcGetIntegerv(alInDev, ALC_CAPTURE_SAMPLES, sizeof(samples), &samples);
     if(samples >= framesize)
    {
-        alcCaptureSamples(calls[callId].alInDev, buf, framesize);
+        alcCaptureSamples(alInDev, buf, framesize);
         frame = 1;
     }
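The last hunk is the consumer side of the shared capture device: sendCallAudio now polls the global alInDev instead of a per-call device. A rough sketch of that polling pattern in isolation, assuming a capture device already opened with alcCaptureOpenDevice and started with alcCaptureStart (the pollCaptureFrame helper and the frame size are hypothetical, not qTox code):

#include <AL/al.h>
#include <AL/alc.h>
#include <cstdint>
#include <vector>

// Fills `frame` and returns true once at least `framesize` samples are queued
// on the shared capture device; returns false if a full frame isn't ready yet.
bool pollCaptureFrame(ALCdevice* inDev, std::vector<int16_t>& frame, ALCint framesize)
{
    ALCint available = 0;
    alcGetIntegerv(inDev, ALC_CAPTURE_SAMPLES, 1, &available);  // queued sample count
    if (available < framesize)
        return false;

    frame.resize(static_cast<size_t>(framesize));
    alcCaptureSamples(inDev, frame.data(), framesize);          // drain exactly one frame
    return true;
}

// Typical use: call this from a periodic timer and, on success, pass the PCM
// frame on for encoding, as Core::sendCallAudio does after alcCaptureSamples.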