Added a new example project for live decoding in QtQuick 2 (contribution of Luiggi Reffatti from Fira Soft). The QZXingFilter implementation will be examined in the future for embedding into the main implementation of the library.

This commit is contained in:
favoritas37 2016-09-27 03:06:28 +03:00
parent fa8d3ca99f
commit fa186c2cc9
7 changed files with 463 additions and 0 deletions
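In outline: main.cpp registers the filter type for QML with qmlRegisterType<QZXingFilter>("QZXing", 2, 3, "QZXingFilter"), and main.qml attaches it to the camera's video output. A condensed sketch of that pattern (imports as in main.qml; the complete files follow below):

Camera { id: camera }
VideoOutput { source: camera; filters: [ zxingFilter ] }
QZXingFilter {
    id: zxingFilter
    onTagFound: console.log(tag)   // tag is the decoded string
}

Each frame delivered to the filter is copied into a SimpleVideoFrame, converted to a QImage and decoded on a background thread via QtConcurrent, and tagFound is emitted back to QML when a code is recognized.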

View File

@ -0,0 +1,197 @@
#include "QZXingFilter.h"
#include <QDebug>
#include <QtConcurrent/QtConcurrent>
QZXingFilter::QZXingFilter(QObject *parent)
: QAbstractVideoFilter(parent)
, decoding(false)
{
/// By default all barcode formats are enabled, but that means extra processing, so only the formats needed here are set.
decoder.setDecoder(QZXing::DecoderFormat_Aztec | QZXing::DecoderFormat_QR_CODE);
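/// Other QZXing::DecoderFormat_* flags could be OR'ed in here if more barcode types are needed.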
/// Connecting signals to handlers that will forward the signals to QML
connect(&decoder, &QZXing::decodingStarted,
this, &QZXingFilter::handleDecodingStarted);
connect(&decoder, &QZXing::decodingFinished,
this, &QZXingFilter::handleDecodingFinished);
connect(&decoder, &QZXing::tagFound,
this, &QZXingFilter::handleTagFound);
}
QZXingFilter::~QZXingFilter()
{
}
void QZXingFilter::handleDecodingStarted()
{
decoding = true;
emit decodingStarted();
emit isDecodingChanged();
}
void QZXingFilter::handleDecodingFinished(bool succeeded)
{
decoding = false;
emit decodingFinished(succeeded, decoder.getProcessTimeOfLastDecoding());
emit isDecodingChanged();
}
void QZXingFilter::handleTagFound(QString tag)
{
// qDebug() << "handleTagFound";
emit tagFound(tag);
}
QVideoFilterRunnable * QZXingFilter::createFilterRunnable()
{
return new QZXingFilterRunnable(this);
}
/// Qt can't natively create a QImage from certain pixel formats (BGR and YUV).
/// As Android QVideoFrames are encoded as BGR, we created this conversion function.
QImage QZXingFilter::fromBGRAtoARGB(uchar * data, QSize size, QVideoFrame::PixelFormat pixelFormat)
{
if(pixelFormat != QVideoFrame::Format_BGRA32
&& pixelFormat != QVideoFrame::Format_BGRA32_Premultiplied
&& pixelFormat != QVideoFrame::Format_BGR32)
{
return QImage();
}
QImage image(data, size.width(), size.height(), QImage::Format_ARGB32);
int curPixel = 0;
unsigned char * pCur = 0;
unsigned char * pPixel = image.bits();
for (int i = 0; i < size.width() * size.height(); ++i)
{
curPixel = *((int *)pPixel); /// This changes the order of the bytes. Endianness?
pCur = (unsigned char *)&curPixel;
/*B*/ pPixel[0] = pCur[0]; // B
/*G*/ pPixel[1] = pCur[1]; // G
/*R*/ pPixel[2] = pCur[2]; // R
/*A*/ pPixel[3] = 0xFF; // A Channel is ignored.
pPixel += 4;
}
return image;
}
///
/// QZXingFilterRunnable
///
QZXingFilterRunnable::QZXingFilterRunnable(QZXingFilter * filter)
: QObject(nullptr)
, filter(filter)
{
}
QVideoFrame QZXingFilterRunnable::run(QVideoFrame * input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags)
{
Q_UNUSED(surfaceFormat);
Q_UNUSED(flags);
/// We don't want to decode every single frame we get, as this would be very costly.
/// These checks attempt to ensure that only one frame is processed at a time.
if(!input || !input->isValid())
{
// qDebug() << "[QZXingFilterRunnable] Invalid Input ";
return * input;
}
if(filter->isDecoding())
{
// qDebug() << "------ decoder busy.";
return * input;
}
if(!filter->processThread.isFinished())
{
// qDebug() << "--[]-- decoder busy.";
return * input;
}
filter->decoding = true;
/// Copy the data we need to the filter.
/// TODO: Depending on the system / platform, this copy hangs up the UI for some seconds. Fix this.
filter->frame.copyData(* input);
/// All further processing happens in another thread, as we are currently in the UI thread.
filter->processThread = QtConcurrent::run(this, &QZXingFilterRunnable::processVideoFrameProbed, filter->frame);
return * input;
}
void QZXingFilterRunnable::processVideoFrameProbed(SimpleVideoFrame & videoFrame)
{
static unsigned int i = 0; i++;
// qDebug() << "Future: Going to process frame: " << i;
/// Create QImage from QVideoFrame.
QImage::Format imageFormat = QVideoFrame::imageFormatFromPixelFormat(videoFrame.pixelFormat);
QImage image((uchar*)videoFrame.data.data(), videoFrame.size.width(), videoFrame.size.height(), imageFormat);
/// If it fails, it's probably a format problem.
/// Let's try to convert it from BGR formats to RGB
if(image.isNull())
image = QZXingFilter::fromBGRAtoARGB((uchar*)videoFrame.data.data(), videoFrame.size, videoFrame.pixelFormat);
/// This is a forced "conversion", colors end up swapped.
if(image.isNull() && videoFrame.pixelFormat == QVideoFrame::Format_BGR555)
image = QImage((uchar*)videoFrame.data.data(), videoFrame.size.width(), videoFrame.size.height(), QImage::Format_RGB555);
/// This is a forced "conversion", colors end up swapped.
if(image.isNull() && videoFrame.pixelFormat == QVideoFrame::Format_BGR565)
image = QImage((uchar*)videoFrame.data.data(), videoFrame.size.width(), videoFrame.size.height(), QImage::Format_RGB16);
/// This is a forced "conversion", colors end up swapped.
if(image.isNull() && videoFrame.pixelFormat == QVideoFrame::Format_BGR24)
image = QImage((uchar*)videoFrame.data.data(), videoFrame.size.width(), videoFrame.size.height(), QImage::Format_RGB888);
/// TODO: Handle (create QImages from) YUV formats.
if(image.isNull())
{
qDebug() << "QZXingFilterRunnable error: Cant create image file to process.";
qDebug() << "Maybe it was a format conversion problem? ";
qDebug() << "VideoFrame format: " << videoFrame.pixelFormat;
qDebug() << "Image corresponding format: " << imageFormat;
filter->decoding = false;
return;
}
/// The frames we get from the camera may be mirrored horizontally or vertically.
/// As the decoder can't handle mirrored frames, we cycle through all possible mirror combinations, changing the mode on each frame.
/// TODO: Maybe there is a better way to know the orientation beforehand? Or should we try decoding every variant of each frame?
switch (i % 4)
{
case 0:
image = image.mirrored(false, false);
break;
case 1:
image = image.mirrored(true, false);
break;
case 2:
image = image.mirrored(false, true);
break;
case 3:
image = image.mirrored(true, true);
break;
}
// qDebug() << "image mirrored type == " << (i % 4);
// qDebug() << "image.size() " << image.size();
// qDebug() << "image.format() " << image.format();
// QString path;
// path = QStandardPaths::writableLocation(QStandardPaths::PicturesLocation) + "/qrtest";
// qDebug() << "saving image " + QString::number(i) + " at: "
// << path << image.save(path + "/test_" + QString::number(i) + ".png");
filter->decoder.decodeImage(image);
}

View File

@ -0,0 +1,107 @@
#ifndef QZXingFilter_H
#define QZXingFilter_H
#include <QObject>
#include <QAbstractVideoFilter>
#include <QZXing.h>
///
/// References:
///
/// https://blog.qt.io/blog/2015/03/20/introducing-video-filters-in-qt-multimedia/
/// http://doc.qt.io/qt-5/qabstractvideofilter.html
/// http://doc.qt.io/qt-5/qml-qtmultimedia-videooutput.html#filters-prop
/// http://doc.qt.io/qt-5/qvideofilterrunnable.html
/// http://doc.qt.io/qt-5/qtconcurrent-runfunction-main-cpp.html
///
/// This is used to store a QVideoFrame's data while we search the image for QR codes.
struct SimpleVideoFrame
{
QByteArray data;
QSize size;
QVideoFrame::PixelFormat pixelFormat;
SimpleVideoFrame()
: size{0,0}
, pixelFormat{QVideoFrame::Format_Invalid}
{}
SimpleVideoFrame(QVideoFrame & frame)
{
copyData(frame);
}
void copyData(QVideoFrame & frame)
{
frame.map(QAbstractVideoBuffer::ReadOnly);
/// Copy the video frame bytes into this->data.
/// This is done to try to get better performance (less memory allocation, faster unmap).
/// Any other work is performed in a QFuture task, as we want to leave the UI thread as soon as possible.
if(data.size() != frame.mappedBytes())
{
qDebug() << "needed to resize";
data.resize(frame.mappedBytes());
}
memcpy(data.data(), frame.bits(), frame.mappedBytes());
size = frame.size();
pixelFormat = frame.pixelFormat();
frame.unmap();
}
};
/// The video filter that has to be registered in C++, then instantiated and attached to a VideoOutput in QML.
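/// In this example it is registered in main.cpp via qmlRegisterType<QZXingFilter>("QZXing", 2, 3, "QZXingFilter")
/// and attached in main.qml via VideoOutput { filters: [ zxingFilter ] }.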
class QZXingFilter : public QAbstractVideoFilter
{
friend class QZXingFilterRunnable;
Q_OBJECT
Q_PROPERTY(bool decoding READ isDecoding NOTIFY isDecodingChanged)
signals:
void isDecodingChanged();
void decodingFinished(bool succeeded, int decodeTime);
void tagFound(QString tag);
void decodingStarted();
private slots:
void handleDecodingStarted();
void handleDecodingFinished(bool succeeded);
void handleTagFound(QString tag);
private: /// Attributes
QZXing decoder;
bool decoding;
SimpleVideoFrame frame;
QFuture<void> processThread;
public: /// Methods
explicit QZXingFilter(QObject *parent = 0);
virtual ~QZXingFilter();
bool isDecoding() {return decoding; }
QVideoFilterRunnable * createFilterRunnable();
static QImage fromBGRAtoARGB(uchar * data, QSize size, QVideoFrame::PixelFormat pixelFormat);
};
/// A new runnable is created every time the filter gets a new frame.
class QZXingFilterRunnable : public QObject, public QVideoFilterRunnable
{
Q_OBJECT
public:
explicit QZXingFilterRunnable(QZXingFilter * filter);
/// This method is called whenever we get a new frame. It runs in the UI thread.
QVideoFrame run(QVideoFrame * input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags);
void processVideoFrameProbed(SimpleVideoFrame & videoFrame);
private:
QZXingFilter * filter;
};
#endif // QZXingFilter_H

View File

@ -0,0 +1,21 @@
TEMPLATE = app
QT += qml quick widgets multimedia
CONFIG += c++11
SOURCES += main.cpp \
QZXingFilter.cpp
RESOURCES += qml.qrc
# Additional import path used to resolve QML modules in Qt Creator's code model
QML_IMPORT_PATH =
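# Pull the QZXing library (sources and build settings) into this example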
include(../../src/QZXing.pri)
# Default rules for deployment.
include(deployment.pri)
HEADERS += \
QZXingFilter.h

View File

@ -0,0 +1,13 @@
unix:!android {
isEmpty(target.path) {
qnx {
target.path = /tmp/$${TARGET}/bin
} else {
target.path = /opt/$${TARGET}/bin
}
export(target.path)
}
INSTALLS += target
}
export(INSTALLS)

View File

@ -0,0 +1,28 @@
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QQmlContext>
#include <QDebug>
#include <qzxing.h>
#include <Qt>
#include "QZXingFilter.h"
#if defined(Q_OS_IOS)
/// Reference for iOS entry point:
/// http://stackoverflow.com/questions/25353686/you-are-creating-qapplication-before-calling-uiapplicationmain-error-on-ios
extern "C" int qtmn(int argc, char **argv)
#else
int main(int argc, char *argv[])
#endif
{
QCoreApplication::setAttribute(Qt::AA_UseHighDpiPixmaps);
QGuiApplication app(argc, argv);
QQmlApplicationEngine engine;
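// Expose the filter to QML; main.qml uses it via "import QZXing 2.3"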
qmlRegisterType<QZXingFilter>("QZXing", 2, 3, "QZXingFilter");
engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
return app.exec();
}

View File

@ -0,0 +1,92 @@
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtQuick.Layouts 1.1
import QtQuick.Dialogs 1.2
import QtMultimedia 5.5
import QZXing 2.3
ApplicationWindow
{
id: window
visible: true
width: 640
height: 480
title: "Qt QZXing Filter Test"
Rectangle
{
id: bgRect
color: "white"
anchors.fill: videoOutput
}
Text
{
id: text1
wrapMode: Text.Wrap
font.pixelSize: 20
anchors.top: parent.top
anchors.left: parent.left
z: 50
text: "......"
}
Camera
{
id:camera
}
VideoOutput
{
id: videoOutput
source: camera
anchors.top: text1.bottom
anchors.bottom: text2.top
anchors.left: parent.left
anchors.right: parent.right
// autoOrientation: true
filters: [ zxingFilter ]
}
QZXingFilter
{
id: zxingFilter
onDecodingStarted:
{
}
onDecodingFinished:
{
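// succeeded and decodeTime are available here; the handlers are left empty in this example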
if(succeeded)
{
}
else
{
}
}
onTagFound:
{
console.log("--!!--");
console.log(tag);
text1.text = "--00--";
text2.text = tag;
}
}
Text
{
id: text2
wrapMode: Text.Wrap
font.pixelSize: 20
anchors.bottom: parent.bottom
anchors.left: parent.left
z: 50
text: "Nothing yet..."
}
}

View File

@ -0,0 +1,5 @@
<RCC>
<qresource prefix="/">
<file>main.qml</file>
</qresource>
</RCC>