[Interest] Best way to threaded render (images) going forward?
Wesley Krasko
wesblake at gmail.com
Fri Nov 22 00:04:52 CET 2019
Thanks. For now anyways I'm trying to just move from QGLWidget to
QOpenGLWidget, buy time.
Can I get some help? In the QGLWidget one I currently have there is a
separate thread to render and it uses QPainter.
I'm trying to do the same for the new class but I can't seem to get a
QPainter working no matter what! The app runs, but as soon as drawing starts
it crashes, even though I'm using similar methods to what I did before. If
I comment out just the attempt to create QPainter, it works fine, renders
fast just doing the glclear part. Hopefully I can attach here. There is a
lot of "Extra" from our previous surface/class, but it's not yet used
anyways. As mentioned, this is working as is, but where you find "//TODO
HERE" in the code, I tried many ways to use a QPainter in the thread
without success.
On Tue, Nov 19, 2019 at 11:18 AM Christoph Feck <cfeck at kde.org> wrote:
> On 11/19/19 19:52, Wesley Krasko wrote:
> > Yes, I've read about RHI but it all seems to imply it's for Qt Quick,
> what
> > about Qt Widget based apps going forward?
>
> Let me quote what is written there: "We will need to base all our
> rendering infrastructure (QPainter, the Qt Quick Scenegraph, and our 3D
> support) on top of that layer."
>
> QWidgets use QPainter for rendering.
>
> > Is my best bet QOpenGLWidget until 6.x comes out then and then re-write
> again?
>
> If you are using OpenGL, RHI won't magically change your code. Use
> QPainter, the Qt Quick scene graph, or Qt3D, depending on your case.
>
> _______________________________________________
> Interest mailing list
> Interest at qt-project.org
> https://lists.qt-project.org/listinfo/interest
>
--
Wes Krasko
www.worldwidewes.com
www.kraskofamily.com
"Stay away from negative people. They have a problem for every solution."
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.qt-project.org/pipermail/interest/attachments/20191121/38feb174/attachment.html>
-------------- next part --------------
#ifndef GLWIDGET_H
#define GLWIDGET_H
#include "framequeue.h"
#include "hapiinterface.h"
#include <QOpenGLWidget>
#include <QOpenGLContext>
#include <QImage>
#include <QThread>
#include <QMutex>
#include <QList>
#include <QWaitCondition>
class GLWidget;
class Renderer : public QObject
{
Q_OBJECT
public:
Renderer(GLWidget *);
~Renderer();
void lockRenderer() { m_renderMutex.lock(); }
void unlockRenderer() { m_renderMutex.unlock(); }
QMutex *grabMutex() { return &m_grabMutex; }
QWaitCondition *grabCond() { return &m_grabCond; }
void prepareExit() { m_exiting = true; m_grabCond.wakeAll(); }
enum {
FULLVL = 0,//"Full Video Local"
FULLVR = 1,//"Full Video Remote"
PIP1 = 2,//PIP one line
PIP2 = 3,//PIP 2 line
LSV = 4,//Left Side Video
RSV = 5//Right Side Video
};
FrameQueue *selfView;
QList<FrameQueue*> remoteViewList;
signals:
void contextWanted();
public slots:
void start();
void stop();
void pause();
void resume();
void resumeToClose();
void setVidLayout(int layout);
void setPrivacyMode(bool isPrivacy);
void setNoRemoteVideo(int lineHandle, bool noRemote);
void scaleUp();
void scaleDown();
void disableEvents();
void render();
void eventHandler(int event, long param1, long param2);
private:
static const int animationFPS = 30;
static const int pipWidth = 160;
static const int pipHeight = 120;
bool m_inited;
GLWidget *m_glwidget;
QMutex m_renderMutex;
QMutex m_grabMutex;
QWaitCondition m_grabCond;
bool m_exiting;
bool skipEvents;
QImage selfImageToRender;
//I'm putting this in temporarily.
//I'm hoping to find a more generic way so I can use lines available from Xyclops like everywhere else.
QImage remoteImageToRender1;
QImage remoteImageToRender2;
QImage privacyImage;
QImage holdImage;
QImage audioOnlyImage;
QImage noRemoteVideoImage;
bool pauseRendering;
int localCWidth;
int localCHeight;
int scalingFactor;
bool m_privacyMode;
int m_currentVideoLayout;
int m_sessionVideoLayout;
QRect sizeFrameToFit(int frameWidth, int frameHeight, int position);
};
// GUI-side widget: owns the render thread and the Renderer, and coordinates
// the GL-context hand-off between the two threads.
class GLWidget : public QOpenGLWidget
{
Q_OBJECT
public:
GLWidget(QWidget *parent);
~GLWidget();
void stopRendering();
void startRendering();
void pauseRendering();
void resumeRendering();
void resumeForClose();
bool isStarted();
void setVideoLayout(int layout);
// int getVideoLayout();
void setPrivacyMode(bool isPrivacy);
void setNoRemoteVideo(int lineHandle, bool noRemote);
void scaleUp();
void scaleDown();
void disableEvents();
// Overall screen layouts (distinct from Renderer's per-frame positions).
enum {
NORMALMODE = 0,
SIDEBYSIDE = 1,
REMOTEONLY = 2
};
protected:
// Deliberately empty: all painting is done by the Renderer on its own thread.
void paintEvent(QPaintEvent *) Q_DECL_OVERRIDE { }
signals:
void renderRequested();
void vidLayoutChange(int);
void privacyModeChange(bool);
void setNoRemote(int, bool);
public slots:
// Moves the GL context to the render thread (invoked via Renderer::contextWanted).
void grabContext();
private slots:
void onAboutToCompose();
void onFrameSwapped();
void onAboutToResize();
void onResized();
private:
QThread *m_thread;
Renderer *m_renderer;
bool m_isStarted;
};
#endif // GLWIDGET_H
-------------- next part --------------
#include "glwidget.h"
#include <QGuiApplication>
#include <QOpenGLPaintDevice>
#include <QPainter>
//Leak detection
#ifdef Q_OS_WIN32
#ifdef QT_DEBUG
#include "debugtools/reportinghook.h"
#include "debugtools/setdebugnew.h"
#define new DEBUG_NEW
#endif
#endif
//-----------------------------------------------------------------------------
// File-scope trace handle for this module; errors and warnings enabled by default.
static VPTYPE_HTRACE tGLVideo = VPRegisterTraceModule("UI-Rendering", VPTRACE_ERROR|VPTRACE_WARN);
//-----------------------------------------------------------------------------
// Constructs the widget, spawns the render thread, and wires up the
// context-hand-off and state-forwarding connections.
GLWidget::GLWidget(QWidget *parent)
: QOpenGLWidget(parent)
, m_isStarted(false)
{
// Interleave rendering with the GUI thread's composition/resize phases.
connect(this, &QOpenGLWidget::aboutToCompose, this, &GLWidget::onAboutToCompose);
connect(this, &QOpenGLWidget::frameSwapped, this, &GLWidget::onFrameSwapped);
connect(this, &QOpenGLWidget::aboutToResize, this, &GLWidget::onAboutToResize);
connect(this, &QOpenGLWidget::resized, this, &GLWidget::onResized);
m_thread = new QThread;
m_renderer = new Renderer(this);
m_renderer->moveToThread(m_thread);
connect(m_thread, &QThread::finished, m_renderer, &QObject::deleteLater);
connect(this, &GLWidget::renderRequested, m_renderer, &Renderer::render);
connect(m_renderer, &Renderer::contextWanted, this, &GLWidget::grabContext);
// BUG FIX: the old string-based connect named a non-existent slot
// "setVideoLayout(int)" (Renderer's slot is setVidLayout), so the layout
// change was silently dropped at runtime. Pointer-to-member connects are
// checked at compile time and cannot drift like that.
connect(this, &GLWidget::vidLayoutChange, m_renderer, &Renderer::setVidLayout);
connect(this, &GLWidget::privacyModeChange, m_renderer, &Renderer::setPrivacyMode);
connect(this, &GLWidget::setNoRemote, m_renderer, &Renderer::setNoRemoteVideo);
}
GLWidget::~GLWidget()
{
// stopRendering() is intentionally not called here; the owner is expected
// to call it explicitly before destruction.
// NOTE(review): if that contract is not honored, m_thread is leaked and the
// render thread keeps running — confirm every owner calls stopRendering().
// stopRendering();
}
void GLWidget::stopRendering()
{
m_renderer->prepareExit();
m_thread->quit();
m_thread->wait();
delete m_thread;
}
// Starts the render thread and asks the Renderer to register the self view.
// Idempotent: a second call must not queue a duplicate registration.
void GLWidget::startRendering()
{
if (m_isStarted)
return;
m_thread->start();
m_isStarted = true;
// Queued invocation: Renderer::start() executes on the render thread.
QMetaObject::invokeMethod(m_renderer, "start");
}
// Thin GUI-thread wrappers: each queues a cross-thread invocation (or emits a
// signal) so Renderer state is only touched on the render thread.
void GLWidget::pauseRendering()
{
QMetaObject::invokeMethod(m_renderer, "pause");
}
void GLWidget::resumeRendering()
{
QMetaObject::invokeMethod(m_renderer, "resume");
}
void GLWidget::resumeForClose()
{
// Resumes drawing without re-registering the self view (see Renderer::resumeToClose).
QMetaObject::invokeMethod(m_renderer, "resumeToClose");
}
bool GLWidget::isStarted()
{
return m_isStarted;
}
void GLWidget::setVideoLayout(int layout)
{
// Forwarded to the Renderer through the vidLayoutChange connection made in the ctor.
emit vidLayoutChange(layout);
}
//TODO
//int GLWidget::getVideoLayout()
//{
// return glPainter.getVidLayout();
//}
// GUI-thread forwarders: emit signals connected to the matching Renderer slots.
void GLWidget::setPrivacyMode(bool isPrivacy)
{
emit privacyModeChange(isPrivacy);
}
void GLWidget::setNoRemoteVideo(int lineHandle, bool noRemote)
{
emit setNoRemote(lineHandle, noRemote);
}
// Cross-thread queued invocations of the corresponding Renderer slots.
void GLWidget::scaleUp()
{
QMetaObject::invokeMethod(m_renderer, "scaleUp");
}
void GLWidget::scaleDown()
{
QMetaObject::invokeMethod(m_renderer, "scaleDown");
}
void GLWidget::disableEvents()
{
QMetaObject::invokeMethod(m_renderer, "disableEvents");
}
void GLWidget::onAboutToCompose()
{
//We are on the gui thread here. Composition is about to begin, wait until the render thread finishes.
m_renderer->lockRenderer();
}
void GLWidget::onFrameSwapped()
{
// Composition done; let the render thread draw the next frame.
m_renderer->unlockRenderer();
//Assuming a blocking swap, our animation is driven purely by the vsync in this example.
emit renderRequested();
}
// Block the render thread while the GUI thread resizes the framebuffer.
void GLWidget::onAboutToResize()
{
m_renderer->lockRenderer();
}
void GLWidget::onResized()
{
m_renderer->unlockRenderer();
}
// GUI-thread slot: hands the GL context over to the render thread. Invoked
// (queued) by Renderer::contextWanted while render() blocks on grabCond.
// The render mutex is taken first so the context is not moved out from under
// an in-progress composition.
void GLWidget::grabContext()
{
m_renderer->lockRenderer();
QMutexLocker lock(m_renderer->grabMutex());
// A context may only be made current on the thread that owns it, so
// ownership is explicitly pushed to the render thread here.
context()->moveToThread(m_thread);
// Wake render(), which is waiting on grabCond with grabMutex held.
m_renderer->grabCond()->wakeAll();
m_renderer->unlockRenderer();
}
// Constructs the renderer (still on the GUI thread; GLWidget moves it to the
// render thread afterwards). Loads the static overlay images from resources
// and creates one FrameQueue per possible call line.
Renderer::Renderer(GLWidget *w)
: m_inited(false),
m_glwidget(w),
m_exiting(false),
skipEvents(false),
pauseRendering(false),
localCWidth(0),
localCHeight(0),
scalingFactor(100),
m_privacyMode(false),
m_currentVideoLayout(0),
m_sessionVideoLayout(0),
privacyImage(":/resource/img/PrivacyMode.png"),
holdImage(":/resource/img/CallOnHold.png"),
audioOnlyImage(":/resource/img/AudioOnly.png"),
noRemoteVideoImage(":/resource/img/NoRemoteVideo.png")
{
selfView = new FrameQueue("selfView");
// Default to 4 lines; HAPIGetPropertyInt overwrites lineCount on success.
// (Return code deliberately ignored — the old unused iRet local is gone.)
long lineCount = 4;
HAPIGetPropertyInt(catPhone, propPhoneCallsLimit, &lineCount);
for(int i=0; i<lineCount; i++){
// QString::arg replaces the deprecated mutating QString::sprintf.
remoteViewList.append(new FrameQueue(QString("remoteView%1").arg(i)));
}
// Queued so eventHandler always runs on the thread this object lives on.
QObject::connect(&hapiIface, SIGNAL(eventOccurred(int,long,long)), SLOT(eventHandler(int,long,long)), Qt::QueuedConnection);
}
// Destroys the renderer (runs via deleteLater on QThread::finished).
// Frees the self-view queue and every remote-view queue.
Renderer::~Renderer()
{
VPTrace(tGLVideo, VPTRACE_INFO, "Cleaning up Renderer...");
// delete on a null pointer is a no-op, so the old null checks were redundant.
delete selfView;
selfView = nullptr;
// qDeleteAll replaces the manual index loop over the FrameQueue pointers.
qDeleteAll(remoteViewList);
remoteViewList.clear();
}
// Render-thread slots driving the HAPI self-view registration lifecycle.
void Renderer::start()
{
// Begin receiving local camera frames into selfView.
HAPIRegisterSelfView(selfView);
}
void Renderer::stop()
{
HAPIUnregisterSelfView(selfView);
}
void Renderer::pause()
{
// Stop drawing and stop receiving self-view frames.
pauseRendering = true;
HAPIUnregisterSelfView(selfView);
}
void Renderer::resume()
{
pauseRendering = false;
HAPIRegisterSelfView(selfView);
}
void Renderer::resumeToClose()
{
// Resume drawing only; the self view stays unregistered (used on the close path).
pauseRendering = false;
}
// Render-thread slot: records the requested video layout; takes effect on the
// next rendered frame.
void Renderer::setVidLayout(int layout)
{
m_currentVideoLayout = layout;
}
// Render-thread slot: in privacy mode the local camera is unregistered so no
// self-view frames are delivered at all (not merely hidden).
void Renderer::setPrivacyMode(bool isPrivacy)
{
m_privacyMode = isPrivacy;
if(m_privacyMode)
HAPIUnregisterSelfView(selfView);
else
HAPIRegisterSelfView(selfView);
}
// Render-thread slot: flags a line's queue as having no remote video.
// BUG FIX: bounds-check both ends — a negative lineHandle previously passed
// the upper-bound check and crashed inside QList::at().
void Renderer::setNoRemoteVideo(int lineHandle, bool noRemote)
{
if(lineHandle >= 0 && lineHandle < remoteViewList.size())
remoteViewList.at(lineHandle)->setNoRemoteVideo(noRemote);
}
void Renderer::scaleUp()
{
scalingFactor += 10;
if(scalingFactor > 100)
scalingFactor = 100;
}
void Renderer::scaleDown()
{
scalingFactor -= 10;
if(scalingFactor < 30)
scalingFactor = 30;
}
void Renderer::disableEvents()
{
skipEvents = true;
}
void Renderer::render()
{
if(m_exiting)
return;
QOpenGLContext *ctx = m_glwidget->context();
if(!ctx)//QOpenGLWidget not yet initialized
return;
//Grab the context.
m_grabMutex.lock();
emit contextWanted();
m_grabCond.wait(&m_grabMutex);
QMutexLocker lock(&m_renderMutex);
m_grabMutex.unlock();
if(m_exiting)
return;
Q_ASSERT(ctx->thread() == QThread::currentThread());
//Make the context (and an offscreen surface) current for this thread.
//The QOpenGLWidget's fbo is bound in the context.
m_glwidget->makeCurrent();
if(!m_inited) {
m_inited = true;
// initializeOpenGLFunctions();
}
glClearColor(0.27f, 0.26f, 0.26f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if(pauseRendering){
return;
} else {
bool haveSelfFrame = selfView->popElement(selfImageToRender);
if(haveSelfFrame || !selfImageToRender.isNull()){
//TODO HERE
//How do I get QPainter here?! The context is moved yet I'm getting errors!
qDebug() << "WTF";
}
}
//Make no context current on this thread and move the QOpenGLWidget's context back to the gui thread.
m_glwidget->doneCurrent();
ctx->moveToThread(qGuiApp->thread());
//Schedule composition. Note that this will use QueuedConnection, meaning that update() will be invoked on the gui thread.
QMetaObject::invokeMethod(m_glwidget, "update");
}
// Render-thread slot: tracks per-line call state from HAPI events so render()
// knows which remote queues are live, on hold, audio-only, etc.
// param1 carries the line handle (used as the queue index); param2 is
// event-specific (media flags for CONNECTED, hold type for ON_HOLD_STATUS).
void Renderer::eventHandler(int event, long param1, long param2)
{
if(skipEvents)
return;
int queueUsed = (int)param1;
int p2Pass = (int)param2;
int linesOnCall = 0;
bool offCall = true;
// BUG FIX: bound the index on both ends once, up front. A negative handle
// previously passed every "queueUsed < size()" check and crashed in
// QList::at(); the VIDEOIN_CHANNEL_OPEN case had no check at all.
const bool lineValid = queueUsed >= 0 && queueUsed < remoteViewList.size();
switch(event){
case HAPI_EV_CALL_CONNECTED:
if(lineValid && remoteViewList.at(queueUsed)->getRegisteredLine()==-1){//Queue free
remoteViewList.at(queueUsed)->setRegisteredLine(queueUsed);
remoteViewList.at(queueUsed)->lineStatus = event;
}
// Count registered lines to detect the idle -> first-call transition.
for(int i=0; i<remoteViewList.size(); i++){
if(remoteViewList.at(i)->getRegisteredLine()>-1){
linesOnCall += 1;
}
}
if(linesOnCall==1){//Animate, starting call from idle.
localCWidth = m_glwidget->width();
localCHeight = m_glwidget->height();
}
if( ((param2 & VPCALL_MEDIA_VIDEO) != 0) ){
//Video call: nothing extra to flag.
} else {
if(lineValid)
remoteViewList.at(queueUsed)->setIsAudioOnly(true);
}
m_currentVideoLayout = m_sessionVideoLayout;
break;
case HAPI_EV_VIDEOIN_CHANNEL_OPEN:
if(lineValid && remoteViewList.at(queueUsed)->getRegisteredLine()==(int)param1){//Queue in use by this line
remoteViewList.at(queueUsed)->lineStatus = event;
HAPIRegisterMainView(queueUsed,remoteViewList.at(queueUsed));
}
if(lineValid && remoteViewList.at(queueUsed)->getRegisteredLine()==-1){//Queue free, but "early media" type case
remoteViewList.at(queueUsed)->setRegisteredLine(queueUsed);
remoteViewList.at(queueUsed)->lineStatus = event;
HAPIRegisterMainView(queueUsed,remoteViewList.at(queueUsed));
}
if(lineValid){
// Only re-apply the session layout the first time the channel opens.
if(!remoteViewList.at(queueUsed)->alreadyOpen)
m_currentVideoLayout = m_sessionVideoLayout;
remoteViewList.at(queueUsed)->alreadyOpen = true;
}
break;
case HAPI_EV_VIDEOIN_CHANNEL_CLOSE:
if(lineValid && remoteViewList.at(queueUsed)->getRegisteredLine()==(int)param1){
remoteViewList.at(queueUsed)->lineStatus = event;
}
break;
case HAPI_EV_AUDIOIN_CHANNEL_OPEN:
if(lineValid && remoteViewList.at(queueUsed)->getRegisteredLine()==(int)param1){
remoteViewList.at(queueUsed)->lineStatus = event;
}
break;
case HAPI_EV_CALL_DISCONNECTED:
if(lineValid && remoteViewList.at(queueUsed)->getRegisteredLine()==(int)param1){
// Fully reset the line's queue so it can be reused by the next call.
remoteViewList.at(queueUsed)->lineStatus = event;
HAPIUnregisterMainView(queueUsed,remoteViewList.at(queueUsed));
remoteViewList.at(queueUsed)->setRegisteredLine(-1);
remoteViewList.at(queueUsed)->setIsAudioOnly(false);
remoteViewList.at(queueUsed)->setOnHold(false);
remoteViewList.at(queueUsed)->emptyQueue();
remoteViewList.at(queueUsed)->alreadyOpen = false;
}
remoteImageToRender1.fill(QColor(68,67,67));
remoteImageToRender2.fill(QColor(68,67,67));
m_sessionVideoLayout = m_currentVideoLayout;
// If no line remains registered, fall back to the idle layout.
for(int i=0; i<remoteViewList.size(); i++){
if(remoteViewList.at(i)->getRegisteredLine()>-1)
offCall = false;
}
if(offCall)
m_currentVideoLayout = GLWidget::NORMALMODE;
break;
case HAPI_EV_CALL_ON_HOLD_STATUS:
if(p2Pass==VPHOLD_LOCAL || p2Pass==VPHOLD_REMOTE){
m_sessionVideoLayout = m_currentVideoLayout;
//Call on hold
if(lineValid && remoteViewList.at(queueUsed)->getRegisteredLine()==(int)param1){
remoteViewList.at(queueUsed)->lineStatus = event;
remoteViewList.at(queueUsed)->setOnHold(true);
}
}
if(p2Pass==0){
//Off hold
if(lineValid && remoteViewList.at(queueUsed)->getRegisteredLine()==(int)param1){
remoteViewList.at(queueUsed)->lineStatus = HAPI_EV_CALL_CONNECTED;
remoteViewList.at(queueUsed)->setOnHold(false);
}
}
break;
default:
break;
}
}
// Computes the target rectangle for a video frame inside the widget:
// preserves aspect ratio, applies the user scaling factor, and handles the
// PIP/side-by-side placements plus the call start/end animations.
// @param frameWidth/frameHeight source frame dimensions in pixels
// @param position               one of the FULLVL/FULLVR/PIP1/PIP2/LSV/RSV constants
// @return the rectangle (widget coordinates) to draw the frame into
QRect Renderer::sizeFrameToFit(int frameWidth, int frameHeight, int position)
{
// BUG FIX: a zero (or negative) frame dimension previously caused a
// division by zero in the aspect math below; treat it as "no frame".
if(frameWidth <= 0 || frameHeight <= 0)
return QRect();
int aspectX = frameWidth;
int aspectY = frameHeight;
//FULLV is default position/size
int containerW = m_glwidget->width();
int containerH = m_glwidget->height();
int xstart = 0;
int ystart = 0;
//ANIMATIONS
if(position==PIP1 || position==PIP2){//1 line on call, normal PIP, or 2 lines on call, middle PIP
containerW = pipWidth;
containerH = pipHeight;
if(localCWidth>pipWidth){//Still animating: shrink from full widget size toward PIP size
containerW = localCWidth = localCWidth-(int)((m_glwidget->width()-pipWidth)/animationFPS);
containerH = localCHeight = localCHeight-(int)((m_glwidget->height()-pipHeight)/animationFPS);
}
}
if(position==FULLVL){//Check for hangup animation: grow back toward full widget size
if(localCWidth<m_glwidget->width() && localCWidth!=0){//Still animating
containerW = localCWidth = localCWidth+(int)((m_glwidget->width()-pipWidth)/animationFPS);
containerH = localCHeight = localCHeight+(int)((m_glwidget->height()-pipHeight)/animationFPS);
//Ensure we don't animate past the composer bounds
if(containerW>m_glwidget->width()) containerW = m_glwidget->width();
if(containerH>m_glwidget->height()) containerH = m_glwidget->height();
}
}
if(position==LSV || position==RSV){//Half screen, i.e., side by side
containerW = (int)(m_glwidget->width()/2);
}
// All CIF related modes use 11:12 pixel shape which means
// that these modes need to be stretch out to 4:3 to look right
if (((frameWidth == 704) && (frameHeight == 576)) ||
((frameWidth == 352) && (frameHeight == 288)) ||
((frameWidth == 176) && (frameHeight == 144)))
{
aspectX = 4;
aspectY = 3;
}
// Fit width first, then fall back to fitting height.
int aspectWidth = containerW;
int aspectHeight = (aspectWidth * aspectY) / aspectX;
// If the height does not fit in the client, then calculate the width
// using the height instead..
if ((aspectHeight > containerH))
{
aspectHeight = containerH;
aspectWidth = (aspectHeight * aspectX) / aspectY;
}
// Scale the image by the user-selected factor (30-100%).
aspectWidth = aspectWidth * scalingFactor/100;
aspectHeight = aspectHeight * scalingFactor/100;
// Center inside the container.
if(aspectWidth < containerW) xstart = (containerW - aspectWidth) / 2;
if(aspectHeight < containerH) ystart = (containerH - aspectHeight) / 2;
// PIP positions (and the still-animating full view) anchor to the bottom.
if(position==PIP1 || position==PIP2 || (position==FULLVL && localCWidth<m_glwidget->width() && localCWidth!=0))
ystart += m_glwidget->height()-containerH;
if(position==PIP2)
xstart += ((int)(m_glwidget->width()/2))-80;
if(position==RSV)
xstart += (int)(m_glwidget->width()/2);
/*VPTrace(tGLVideo, VPTRACE_STREAM2, "[%s] paint() At %i, %i, %i x %i, Aspect = %i:%i, into %i x %i",
myName.toStdString().c_str(), xstart, ystart, aspectWidth, aspectHeight, aspectX, aspectY, this->width(), this->height());*/
return QRect(xstart, ystart, aspectWidth, aspectHeight);
}
More information about the Interest
mailing list