Conflict in showing the correct image in the right box in QML when using OpenGL renderer

60 Views Asked by At

In the code below, I have two webcams to capture images, which I send to a QML item implemented in a C++ class that inherits from the QQuickFramebufferObject class, and then the image is rendered by the QQuickFramebufferObject::Renderer and the QOpenGLFunctions object. Finally, it is shown in the QML window item. The problem is that the webcam images are displayed in the wrong box. What is my mistake?

QML file:

import QtQuick
import QtQuick.Controls
import QtQuick.Window
import QtMultimedia
import Rimor

ApplicationWindow {
    id: root
    property LiveStream     liveStream: LiveStream
    {
    }

    property var image1
    property var image2

    visible: true
    width: 800
    height: 600
    title: "Camera Frame Capture"

    Timer {
        id: timer
        interval: 1000.0 / 24.0
        repeat: true
        onTriggered: {
            imageCapture1.capture();
            // camera1Item.update();

            imageCapture2.capture();
            // camera2Item.update();
        }
    }

    CaptureSession {
        id: captureSession1
        // camera
        camera: Camera {
            id: camera1
            cameraDevice: liveStream._mediaDevices.videoInputs[0]
        }
        imageCapture: ImageCapture {
            id: imageCapture1
            onImageCaptured: (requestId, previewImage) => {
                root.image1 = previewImage;
            }
        }

        recorder: MediaRecorder {
            id: recorder1
        }
        videoOutput: camera1Item
    }

    CaptureSession {
        id: captureSession2
        // camera
        camera: Camera {
            id: camera2
            cameraDevice: liveStream._mediaDevices.videoInputs[1]
        }
        imageCapture: ImageCapture {
            id: imageCapture2
            onImageCaptured: (requestId, previewImage) => {
                root.image2 = previewImage;
            }
        }

        recorder: MediaRecorder {
            id: recorder2
        }
        videoOutput: camera2Item
    }

    SplitView {
        id: splitView
        anchors.fill: parent
        spacing: 5
        handle: Item {
        }

        Rectangle {
            SplitView.minimumWidth: 400
            SplitView.maximumWidth: 400
            SplitView.minimumHeight: 100
            SplitView.maximumHeight: 100
            border.color : "red"
            border.width : 10
            CameraItem {
                id: camera1Item
                frame: image1
                index: 1
                width: parent.width - 10
                height: parent.height - 10
            }
        }

        Rectangle {
            SplitView.minimumWidth: 400
            SplitView.maximumWidth: 400
            SplitView.minimumHeight: 100
            SplitView.maximumHeight: 100
            border.color : "blue"
            border.width : 10
            CameraItem {
                id: camera2Item
                frame: image2
                index: 2
                width: parent.width - 10
                height: parent.height - 10
            }
        }
    }

    Component.onCompleted: {
        camera1.start();
        camera2.start();
        timer.start();
    }
}

Cpp file:

#include "CameraItem.h"

#include <QMediaDevices>
#include <QOpenGLFramebufferObject>
#include <QtConcurrent/QtConcurrent>
#include <QtQuick/QQuickWindow>
#include <qsgsimpletexturenode.h>

#ifndef GLEW_NO_GLU
#/* this is where we can safely include GLU */
#if defined(__APPLE__) && defined(__MACH__)
#include <OpenGL/glu.h>
#else
#include <GL/glu.h>
#endif
#endif

/* ************************************************************************************************
 * Public Constructors & Destructor
 * ************************************************************************************************/
CameraItem::CameraItem(QQuickItem *parent) : QQuickFramebufferObject{parent}, mBrightness{0}, mSaturation{1}, mContrast{1}, mGamma{1}, mHue{0} { setTextureFollowsItemSize(true); }

CameraItem::~CameraItem() {}

/* ************************************************************************************************
 * Renderer Class
 * ************************************************************************************************/
class CameraItem::FBORenderer : public QQuickFramebufferObject::Renderer, protected QOpenGLFunctions {
public:
  FBORenderer(int index) : mIndex(index) { initializeOpenGLFunctions(); }

  void render() override {
    initializeRenderer();
    BindCVMat2GLTexture(mFrame, m_textureInt);
  }

  QOpenGLFramebufferObject *createFramebufferObject(const QSize &size) override {
    QOpenGLFramebufferObjectFormat format;
    format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil);
    format.setSamples(4);
    resize(size);
    return new QOpenGLFramebufferObject(size, format);
  }

protected:
  // This function is the only place when it is safe for the renderer and the item to read and write each other members.
  void synchronize(QQuickFramebufferObject *item) override {
    CameraItem *fbo = qobject_cast<CameraItem *>(item);
    if (fbo) {
      mFrame = fbo->cvFrame();
    }
  }

private:
  void initializeRenderer() {
    int frameWidth = mFrame.cols;
    int frameHeight = mFrame.rows;
    if (frameWidth <= 0 || frameHeight <= 0) {
      return;
    }
    glViewport(0, 0, frameWidth, frameHeight);

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnable(GL_TEXTURE_2D);
    // These are necessary if using glTexImage2D instead of gluBuild2DMipmaps
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL);

    // Set Projection Matrix
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0, frameWidth, 0, frameHeight);

    // Switch to Model View Matrix
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    // Draw a textured quad
    glBegin(GL_QUADS);
    glTexCoord2f(0.0f, 0.0f);
    glVertex2f(0.0f, 0.0f);
    glTexCoord2f(1.0f, 0.0f);
    glVertex2f(frameWidth, 0.0f);
    glTexCoord2f(1.0f, 1.0f);
    glVertex2f(frameWidth, frameHeight);
    glTexCoord2f(0.0f, 1.0f);
    glVertex2f(0.0f, frameHeight);
    glEnd();

    glFlush();
  }
  void BindCVMat2GLTexture(cv::Mat &image, GLuint &imageTexture) {
    if (image.empty()) {
      return;
    }

    glLoadIdentity();
    glMatrixMode(GL_PROJECTION);
    glOrtho(0, image.cols, image.rows, 0, -1, +1);

    cv::cvtColor(image, image, cv::COLOR_HSV2RGB);

    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexImage2D(GL_TEXTURE_2D,    // Type of texture
                 0,                // Pyramid level (for mip-mapping) - 0 is the top level
                 GL_RGB,           // Internal colour format to convert to
                 image.cols,       // Image width  i.e. 640 for Kinect in standard mode
                 image.rows,       // Image height i.e. 480 for Kinect in standard mode
                 0,                // Border width in pixels (can either be 1 or 0)
                 GL_RGB,           // Input image format (i.e. GL_RGB, GL_RGBA, GL_BGR etc.)
                 GL_UNSIGNED_BYTE, // Image data type
                 image.ptr());     // The actual image data itself

    GLenum err = glGetError();

    if (err != GL_NO_ERROR) {
      qDebug() << Q_FUNC_INFO << __LINE__ << "Failed glTexImage2D" << err;
    }
  }
  void resize(const QSize &size) {
    glLoadIdentity();
    glMatrixMode(GL_PROJECTION);
    float aspect = size.width() / (float)size.height();
    glOrtho(0, size.width(), size.height(), 0, -1, +1);
  }
  cv::Mat mFrame;
  GLuint m_textureInt{0};
  int mIndex;
};

//! Gamma correction
void gammaCorrection(const cv::Mat &src, cv::Mat &dst, const float gamma) {
  float invGamma = 1 / gamma;

  cv::Mat table(1, 256, CV_8U);
  uchar *p = table.ptr();
  for (int i = 0; i < 256; ++i) {
    p[i] = (uchar)(pow(i / 255.0, invGamma) * 255);
  }

  LUT(src, table, dst);
}

/* ************************************************************************************************
 * Public Functions
 * ************************************************************************************************/
QQuickFramebufferObject::Renderer *CameraItem::createRenderer() const { return new FBORenderer(mIndex); }

cv::Mat CameraItem::cvFrame() { return mMatFrame; }

bool CameraItem::fetchFrame(const QImage &imgFrame) {
  // Correct image format
  QImage img = imgFrame.convertToFormat(QImage::Format_RGB32);

  cv::Mat frame = cv::Mat(img.height(), img.width(), CV_8UC4, (uchar *)img.bits(), img.bytesPerLine());
  //  std::cout << "Camera Name: " << mName.toStdString() << "(" << mIndex << "), Size:" << frame.size << std::endl;

  if (frame.empty()) { // return true if the frame is empty, check again !!!
    return false;
  }

  auto h = frame.rows; // h
  auto w = frame.cols;
  if (h <= 0 || w <= 0) {
    return false;
  }

  int height_temp = (int)height();
  if (height_temp <= 0) {
    height_temp = h;
  }

  cv::Size size(w * height_temp / h, height_temp);
  cv::resize(frame, frame, size, 0, 0, cv::INTER_AREA);

  //! clip and mirror it
  frame = frame(clipFrame(frame));
  cv::flip(frame, frame, 1);

  //! Change frame Contrast and Brightness
  frame.convertTo(frame, -1, mContrast, mBrightness);

  //! Change gamma too
  gammaCorrection(frame, frame, mGamma);

  //! Change saturation and hue
  cv::cvtColor(frame, frame, cv::COLOR_BGR2HSV);
  for (int y = 0; y < frame.rows; y++) {
    for (int x = 0; x < frame.cols; x++) {
      cv::Vec3b &pixel = frame.at<cv::Vec3b>(y, x);

      //! Setting saturation: Second channel is Saturation
      uchar saturation = pixel[1];
      pixel[1] = cv::saturate_cast<uchar>(mSaturation * saturation);

      //! Setting hue: First channel is Hue
      uchar hue = pixel[0];
      pixel[0] = uchar(hue + mHue) % 180;
    }
  }

  mMatFrame = frame;
  update(); // to make the renderer::sync function be called

  return (!mMatFrame.empty());
}

QImage CameraItem::frame() const { return mFrame; }

void CameraItem::setFrame(QImage newFrame) {
  mFrame = newFrame;
  fetchFrame(mFrame);
  emit frameChanged();
}

qreal CameraItem::brightness() { return mBrightness; }

void CameraItem::setBrightness(const qreal &brightness) {
  if (mBrightness == brightness) {
    return;
  }

  mBrightness = brightness;
  emit brightnessChanged();
}

qreal CameraItem::contrast() { return mContrast; }

void CameraItem::setContrast(const qreal &contrast) {
  if (mContrast == contrast) {
    return;
  }

  mContrast = contrast;
  emit contrastChanged();
}

qreal CameraItem::hue() { return mHue; }

void CameraItem::setHue(const qreal &hue) {
  if (mHue == hue) {
    return;
  }

  mHue = hue;
  emit hueChanged();
}

qreal CameraItem::saturation() { return mSaturation; }

void CameraItem::setSaturation(const qreal &saturation) {
  if (mSaturation == saturation) {
    return;
  }

  mSaturation = saturation;
  emit saturationChanged();
}

qreal CameraItem::gamma() { return mGamma; }

void CameraItem::setGamma(const qreal &gamma) {
  if (mGamma == gamma) {
    return;
  }

  mGamma = gamma;
  emit gammaChanged();
}

int CameraItem::index() { return mIndex; }

void CameraItem::setIndex(const int &index) {
  if (mIndex == index) {
    return;
  }

  mIndex = index;
  emit indexChanged();
}

QString CameraItem::name() { return mName; }

void CameraItem::setName(const QString &name) {
  if (mName == name) {
    return;
  }

  mName = name;
  emit nameChanged();
}

/*
 * Clip the current video frame to something a bit closer
 * to the aspect ratio of a human face.  Note: this means
 * faces must be roughly centered to be captured
 */
cv::Rect CameraItem::clipFrame(cv::Mat frame) {
  int wid = frame.cols;
  int newwid = (frame.rows * 8 / 7);
  if (newwid > wid) {
    newwid = wid;
  }

  cv::Rect rect;
  rect.y = 0;
  rect.height = frame.rows;
  rect.x = (wid - newwid) / 2;
  rect.width = newwid;

  return rect;
}

Hpp file:

#ifndef RIMOR_CAMERA_ITEM_H
#define RIMOR_CAMERA_ITEM_H

#pragma once

#include <QCamera>
#include <QImageCapture>
#include <QMediaCaptureSession>
#include <QObject>
#include <QOpenGLFramebufferObjectFormat>
#include <QOpenGLFunctions>
#include <QTimer>
#include <QtQuick/QQuickFramebufferObject>
#include <opencv2/opencv.hpp>

/*! ***********************************************************************************************
 * This class manage camera frames.
 * ************************************************************************************************/
class CameraItem : public QQuickFramebufferObject {
  Q_OBJECT

  Q_PROPERTY(QImage frame READ frame WRITE setFrame NOTIFY frameChanged)
  Q_PROPERTY(qreal brightness READ brightness WRITE setBrightness NOTIFY brightnessChanged)
  Q_PROPERTY(qreal saturation READ saturation WRITE setSaturation NOTIFY saturationChanged)
  Q_PROPERTY(qreal contrast READ contrast WRITE setContrast NOTIFY contrastChanged)
  Q_PROPERTY(qreal gamma READ gamma WRITE setGamma NOTIFY gammaChanged)
  Q_PROPERTY(qreal hue READ hue WRITE setHue NOTIFY hueChanged)
  Q_PROPERTY(int index READ index WRITE setIndex NOTIFY indexChanged)
  Q_PROPERTY(QString name READ name WRITE setName NOTIFY nameChanged)

  QML_ELEMENT

public:
  class FBORenderer;
  /* Public Constructors & Destructor
   * ****************************************************************************************/
  CameraItem(QQuickItem *parent = nullptr);
  ~CameraItem();

  /* Public Functions
   * ****************************************************************************************/
  Renderer *createRenderer() const;

  cv::Mat cvFrame();

  QImage frame() const;
  void setFrame(QImage newFrame);

  /* Public Setters and Getters
   * ****************************************************************************************/

  //! Brightness methods
  qreal brightness();
  void setBrightness(const qreal &brightness);

  //! Contrast methods
  qreal contrast();
  void setContrast(const qreal &contrast);

  //! Hue methods
  qreal hue();
  void setHue(const qreal &hue);

  //! Saturation methods
  qreal saturation();
  void setSaturation(const qreal &saturation);

  //! Gamma methods
  qreal gamma();
  void setGamma(const qreal &gamma);

  //! Index methods
  int index();
  void setIndex(const int &index);

  //! Index methods
  QString name();
  void setName(const QString &name);

public slots:
  /* Public Slots
   * ****************************************************************************************/
  bool fetchFrame(const QImage &imgFrame);

signals:
  /* Signals
   * ****************************************************************************************/
  void frameChanged();
  void brightnessChanged();
  void contrastChanged();
  void hueChanged();
  void saturationChanged();
  void gammaChanged();
  void indexChanged();
  void nameChanged();

private:
  /* Private functions
   * ****************************************************************************************/
  cv::Rect clipFrame(cv::Mat frame);

private:
  /* Attributes
   * ****************************************************************************************/
  cv::Mat mMatFrame; //! Stored in HSV format
  QByteArray mCameraId;

  QImage mFrame;
  bool mCameraLive{};

  //! Camera settings
  qreal mBrightness;
  qreal mSaturation;
  qreal mContrast;
  qreal mGamma;
  qreal mHue;
  int mIndex{};
  QString mName;
};

// QML_DECLARE_TYPE(FBORendererCPP)
#endif // RIMOR_CAMERA_ITEM_H
0

There are 0 best solutions below