Commit 9abe0505 authored by Jeff Niu

Webcam view from OpenCV

parent 268836ed
......@@ -102,6 +102,9 @@ From a fresh install, you will need these packages
sudo apt install build-essential cmake python3 python3-dev qt5-default libudev-dev git
```
+OpenCV 3.3 is required. Download and build it from the open-source repository, or
+follow [these instructions](https://github.com/BVLC/caffe/wiki/OpenCV-3.3-Installation-Guide-on-Ubuntu-16.04).
+Then clone this repository and build it with
```bash
git clone https://github.com/uwnrg/minotaur-cpp.git
......
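The build commands after the `git clone` are collapsed above. As a rough sketch only, assuming a standard out-of-source CMake build (not taken from the repository's README), the remaining steps would look something like:
```bash
# Assumed typical CMake workflow; defer to the repository's documented build steps if they differ
cd minotaur-cpp
mkdir build && cd build
cmake ..
make -j"$(nproc)"
```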
......@@ -12,7 +12,7 @@ class ActionAbout : public QDialog
Q_OBJECT
public:
-explicit ActionAbout(QWidget *parent = 0);
+explicit ActionAbout(QWidget *parent = nullptr);
~ActionAbout();
private:
......
#include "camera.h"
#include <QDebug>
#include <QtGui/QPainter>
#include <QtWidgets/QVBoxLayout>
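
// Frame pipeline: Capture polls cv::VideoCapture on a zero-interval timer and emits each frame
// as a cv::Mat; Converter turns frames into QImages (dropping stale ones when it falls behind);
// ImageViewer paints the most recent QImage. CameraDisplay moves Capture and Converter onto
// worker threads so frame grabbing and conversion never block the GUI thread.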
Capture::Capture(QObject *parent)
    : QObject(parent) {}

void Capture::start(int cam) {
    if (!m_video_capture) {
        m_video_capture.reset(new cv::VideoCapture(cam));
    }
    if (m_video_capture->isOpened()) {
        // A zero-interval timer grabs frames as fast as the event loop allows
        m_timer.start(0, this);
        Q_EMIT started();
    }
}

void Capture::stop() {
    m_timer.stop();
}

void Capture::timerEvent(QTimerEvent *ev) {
    if (ev->timerId() != m_timer.timerId()) {
        return;
    }
    cv::Mat frame;
    if (!m_video_capture->read(frame)) {
        m_timer.stop();
        return;
    }
    Q_EMIT matReady(frame);
}

Converter::Converter(QObject *parent)
    : QObject(parent) {}

void Converter::setProcessAll(bool process_all) {
    m_process_all = process_all;
}

void Converter::processFrame(const cv::Mat &frame) {
    if (m_process_all) {
        process(frame);
    } else {
        queue(frame);
    }
}

void Converter::matDelete(void *mat) {
    delete static_cast<cv::Mat *>(mat);
}
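
// Keep only the most recently captured frame; if frames arrive faster than they can be
// converted, earlier ones are overwritten (dropped) rather than queued up.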
void Converter::queue(const cv::Mat &frame) {
    /*if (!m_frame.empty()) {
        qDebug() << "OpenCV Image Converter dropped a frame";
    }*/
    m_frame = frame;
    if (!m_timer.isActive()) {
        m_timer.start(0, this);
    }
}

void Converter::process(cv::Mat frame) {
    // A scale factor of 1 keeps the original size; adjust here to downscale before display
    cv::resize(frame, frame, cv::Size(), 1, 1, cv::INTER_AREA);
    cv::cvtColor(frame, frame, CV_BGR2RGB);
    // Wrap the Mat's pixel buffer in a QImage without copying; the heap-allocated cv::Mat
    // shares the buffer and is released by matDelete once the QImage is destroyed
    const QImage image(
        frame.data, frame.cols, frame.rows, static_cast<int>(frame.step),
        QImage::Format_RGB888, &matDelete, new cv::Mat(frame)
    );
    Q_ASSERT(image.constBits() == frame.data);
    Q_EMIT imageReady(image);
}

void Converter::timerEvent(QTimerEvent *ev) {
    if (ev->timerId() != m_timer.timerId()) {
        return;
    }
    process(m_frame);
    m_frame.release();
    m_timer.stop();
}

ImageViewer::ImageViewer(QWidget *parent)
    : QWidget(parent) {
    setAttribute(Qt::WA_OpaquePaintEvent);
}

void ImageViewer::setImage(const QImage &img) {
    /*if (!m_img.isNull()) {
        qDebug() << "OpenCV Image Viewer dropped a frame";
    }*/
    m_img = img;
    if (m_img.size() != size()) {
        setFixedSize(m_img.size());
    }
    update();
}

void ImageViewer::paintEvent(QPaintEvent *) {
    QPainter painter(this);
    painter.drawImage(0, 0, m_img);
    // Release the image after painting so the underlying frame buffer can be freed
    m_img = {};
}

IThread::~IThread() {
    // Ask the thread's event loop to exit and block until it has finished
    quit();
    wait();
}
CameraDisplay::CameraDisplay(QWidget *parent, int camera)
    : QDialog(parent),
      m_camera(camera) {
    m_layout = new QVBoxLayout(this);
    m_image_viewer = new ImageViewer(this);
    setLayout(m_layout);
    m_layout->addWidget(m_image_viewer);
}

CameraDisplay::~CameraDisplay() {
    delete m_layout;
    delete m_image_viewer;
}

void CameraDisplay::setCamera(int camera) {
    m_camera = camera;
}

int CameraDisplay::getCamera() {
    return m_camera;
}

void CameraDisplay::setVisible(bool visible) {
    if (visible) {
        if (!m_capture_thread.isRunning()) {
            // First show: move capture and conversion onto worker threads so the GUI
            // thread never blocks on the webcam, then wire up the frame pipeline
            m_converter.setProcessAll(false);
            m_capture.moveToThread(&m_capture_thread);
            m_converter.moveToThread(&m_converter_thread);
            m_capture_thread.start();
            m_converter_thread.start();
            QObject::connect(&m_capture, &Capture::matReady, &m_converter, &Converter::processFrame);
            QObject::connect(&m_converter, &Converter::imageReady, m_image_viewer, &ImageViewer::setImage);
        }
        // start() and stop() are invoked through the meta-object system so they run on the capture thread
        QMetaObject::invokeMethod(&m_capture, "start", Q_ARG(int, m_camera));
    } else {
        QMetaObject::invokeMethod(&m_capture, "stop");
    }
    setFixedSize(800, 600);
    QDialog::setVisible(visible);
}

void CameraDisplay::reject() {
    QDialog::reject();
}
#ifndef MINOTAUR_CPP_CAMERA_H
#define MINOTAUR_CPP_CAMERA_H
#include <opencv2/opencv.hpp>
#include <QWidget>
#include <QDialog>
#include <QThread>
#include <QtCore/QBasicTimer>
#include <QtCore/QArgument>
#include <QtCore/QMetaType>
#include <QtCore/QScopedPointer>
#include <QtCore/QTimerEvent>
#include <QtGui/QImage>
#include <QtGui/QPaintEvent>
#include <QtWidgets/QVBoxLayout>
Q_DECLARE_METATYPE(cv::Mat);
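// Note: cv::Mat must also be registered at runtime with qRegisterMetaType<cv::Mat>() before it
// can travel through queued, cross-thread signal connections such as Capture::matReady.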
class Capture : public QObject {
Q_OBJECT
public:
    explicit Capture(QObject *parent = nullptr);

    Q_SIGNAL void started();
    Q_SIGNAL void matReady(const cv::Mat &);

    Q_SLOT void start(int cam = 0);
    Q_SLOT void stop();

private:
    void timerEvent(QTimerEvent *ev) override;

    QBasicTimer m_timer;
    QScopedPointer<cv::VideoCapture> m_video_capture;
};

class Converter : public QObject {
Q_OBJECT
public:
    explicit Converter(QObject *parent = nullptr);

    void setProcessAll(bool process_all);

    Q_SIGNAL void imageReady(const QImage &);
    Q_SLOT void processFrame(const cv::Mat &frame);

private:
    static void matDelete(void *mat);

    void queue(const cv::Mat &frame);
    void process(cv::Mat frame);
    void timerEvent(QTimerEvent *ev) override;

    QBasicTimer m_timer;
    cv::Mat m_frame;
    bool m_process_all = true;
};

class ImageViewer : public QWidget {
Q_OBJECT
public:
    explicit ImageViewer(QWidget *parent = nullptr);

    Q_SLOT void setImage(const QImage &img);

private:
    void paintEvent(QPaintEvent *) override;

    QImage m_img;
};

class IThread final : public QThread {
public:
    ~IThread() override;
};

class CameraDisplay : public QDialog {
Q_OBJECT
public:
    explicit CameraDisplay(QWidget *parent = nullptr, int camera = 0);
    ~CameraDisplay() override;

    void setCamera(int camera);
    int getCamera();

protected:
    void setVisible(bool visible) override;
    void reject() override;

private:
    QVBoxLayout *m_layout;
    ImageViewer *m_image_viewer;
    int m_camera;

    Capture m_capture;
    Converter m_converter;
    IThread m_capture_thread;
    IThread m_converter_thread;
};
#endif //MINOTAUR_CPP_CAMERA_H
......@@ -11,9 +11,9 @@ MainWindow::MainWindow(QWidget *parent, const char *) :
//Set up logger
Logger::setStream(getLogView());
-m_actuator = std::shared_ptr<Actuator>(new Actuator);
-m_solenoid = std::shared_ptr<Solenoid>(new Solenoid);
-m_simulator = std::shared_ptr<Simulator>(new Simulator(1, -1));
+m_actuator = std::make_shared<Actuator>();
+m_solenoid = std::make_shared<Solenoid>();
+m_simulator = std::make_shared<Simulator>(1, -1);
m_controller = m_solenoid;
m_controller_type = Controller::Type::SOLENOID;
......@@ -22,11 +22,11 @@ MainWindow::MainWindow(QWidget *parent, const char *) :
PythonEngine::getInstance().append_module("emb", &Embedded::PyInit_emb);
// Setup subwindows
-actuator_setup_window = new ActuatorSetup(m_actuator, this);
-simulator_window = new SimulatorWindow(m_simulator, this);
-script_window = new ScriptWindow(this);
-//m_simulator->setSimulatorScene(simulator_window->getSimulatorScene());
-action_about_window = new ActionAbout();
+m_actuator_setup_window = new ActuatorSetup(m_actuator, this);
+m_simulator_window = new SimulatorWindow(m_simulator, this);
+m_script_window = new ScriptWindow(this);
+m_about_window = new ActionAbout(this);
+m_camera_display = new CameraDisplay(this);
// Setup slot connections
connect(ui->setup_actuator, SIGNAL(triggered()), this, SLOT(openActuatorSetup()));
......@@ -34,6 +34,7 @@ MainWindow::MainWindow(QWidget *parent, const char *) :
connect(ui->switch_to_simulator_mode, SIGNAL(triggered()), this, SLOT(switchToSimulator()));
connect(ui->start_python_interpreter, SIGNAL(triggered()), this, SLOT(openPythonInterpreter()));
connect(ui->actionAbout, SIGNAL(triggered()), this, SLOT(openActionAbout()));
+connect(ui->actionWebcam_View, SIGNAL(triggered()), this, SLOT(openCameraDisplay()));
// setup focus and an event filter to capture key events
this->installEventFilter(this);
......@@ -96,9 +97,11 @@ void MainWindow::mousePressEvent(QMouseEvent *) {
MainWindow::~MainWindow() {
// Destroy all subwindows
-delete actuator_setup_window;
-delete simulator_window;
-delete script_window;
+delete m_actuator_setup_window;
+delete m_about_window;
+delete m_simulator_window;
+delete m_script_window;
+delete m_camera_display;
delete ui;
}
......@@ -124,13 +127,13 @@ void MainWindow::switchControllerTo(Controller::Type const type) {
// Switch to the actuator controller and hide the simulation window
Logger::log("Switching to ACTUATOR", Logger::INFO);
m_controller = m_actuator;
-if (simulator_window->isVisible()) { simulator_window->hide(); }
+if (m_simulator_window->isVisible()) { m_simulator_window->hide(); }
break;
case Controller::Type::SIMULATOR:
// Switch to the simulator controller and show the simulator window
Logger::log("Switching to SIMULATOR", Logger::INFO);
m_controller = m_simulator;
-if (!simulator_window->isVisible()) { simulator_window->show(); }
+if (!m_simulator_window->isVisible()) { m_simulator_window->show(); }
break;
default:
break;
......@@ -138,13 +141,17 @@ void MainWindow::switchControllerTo(Controller::Type const type) {
}
void MainWindow::openActuatorSetup() {
-actuator_setup_window->show();
+m_actuator_setup_window->show();
}
void MainWindow::openPythonInterpreter() {
-script_window->show();
+m_script_window->show();
}
void MainWindow::openActionAbout() {
-action_about_window->show();
+m_about_window->show();
}
+void MainWindow::openCameraDisplay() {
+m_camera_display->show();
+}
......@@ -16,6 +16,7 @@
#include "simulatorwindow.h"
#include "actionabout.h"
#include "scriptwindow.h"
#include "camera.h"
#define DEFAULT_TITLE "minotaur"
......@@ -43,11 +44,14 @@ public Q_SLOTS:
void openPythonInterpreter();
+void openActionAbout();
+void openCameraDisplay();
inline void switchToActuator() { switchControllerTo(Controller::Type::ACTUATOR); }
inline void switchToSimulator() { switchControllerTo(Controller::Type::SIMULATOR); }
-void openActionAbout();
private Q_SLOTS:
......@@ -59,11 +63,15 @@ private Q_SLOTS:
private:
Ui::MainWindow *ui;
-ActuatorSetup *actuator_setup_window;
-ScriptWindow *script_window;
-SimulatorWindow *simulator_window;
+ActuatorSetup *m_actuator_setup_window;
+ScriptWindow *m_script_window;
+SimulatorWindow *m_simulator_window;
+ActionAbout *m_about_window;
+CameraDisplay *m_camera_display;
Controller::Type m_controller_type;
-ActionAbout *action_about_window;
std::shared_ptr<Controller> m_controller;
std::shared_ptr<Actuator> m_actuator;
std::shared_ptr<Solenoid> m_solenoid;
......
......@@ -283,7 +283,7 @@ p, li { white-space: pre-wrap; }
</action>
<action name="actionWebcam_View">
<property name="enabled">
-<bool>false</bool>
+<bool>true</bool>
</property>
<property name="text">
<string>Webcam View</string>
......
#include "code/interpreter/pythonengine.h"
#include "gui/mainwindow.h"
#include <QApplication>
#include <opencv2/core/mat.hpp>
#include <QCameraInfo>
#include <QCameraViewfinder>
#include <opencv2/videoio.hpp>
#include <opencv2/opencv.hpp>
int thresh = 0; // Canny upper threshold used at threshold level 0
int N = 50;     // number of threshold levels tried per color plane
using std::vector;
using namespace cv;

static double angle(Point pt1, Point pt2, Point pt0) {
    double dx1 = pt1.x - pt0.x;
    double dy1 = pt1.y - pt0.y;
    double dx2 = pt2.x - pt0.x;
    double dy2 = pt2.y - pt0.y;
    return (dx1 * dx2 + dy1 * dy2) / sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10);
}

// returns the sequence of squares detected in the image
static void findSquares(const Mat &image, vector<vector<Point> > &squares) {
    squares.clear();
    Mat pyr, timg, gray0(image.size(), CV_8U), gray;
    // down-scale and upscale the image to filter out noise
    pyrDown(image, pyr, Size(image.cols / 2, image.rows / 2));
    pyrUp(pyr, timg, image.size());
    vector<vector<Point> > contours;
    // find squares in every color plane of the image
    for (int c = 0; c < 3; c++) {
        int ch[] = {c, 0};
        mixChannels(&timg, 1, &gray0, 1, ch, 1);
        // try several threshold levels
        for (int l = 0; l < N; l++) {
            // hack: use Canny instead of a zero threshold level;
            // Canny helps to catch squares with gradient shading
            if (l == 0) {
                // apply Canny with the upper threshold taken from `thresh`
                // and the lower set to 0 (which forces edge merging)
                Canny(gray0, gray, 0, thresh, 5);
                // dilate the Canny output to remove potential
                // holes between edge segments
                dilate(gray, gray, Mat(), Point(-1, -1));
            } else {
                // apply a threshold if l != 0:
                // tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0
                gray = gray0 >= (l + 1) * 255 / N;
            }
            // find contours and store them all as a list
            findContours(gray, contours, RETR_LIST, CHAIN_APPROX_SIMPLE);
            vector<Point> approx;
            // test each contour
            for (size_t i = 0; i < contours.size(); i++) {
                // approximate the contour with accuracy proportional
                // to the contour perimeter
                approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true) * 0.02, true);
                // square contours should have 4 vertices after approximation,
                // a relatively large area (to filter out noisy contours),
                // and be convex.
                // Note: the absolute value of the area is used because the
                // area may be positive or negative, depending on the
                // contour orientation
                if (approx.size() == 4 &&
                    fabs(contourArea(Mat(approx))) > 1000 &&
                    isContourConvex(Mat(approx))) {
                    double maxCosine = 0;
                    for (int j = 2; j < 5; j++) {
                        // find the maximum cosine of the angle between joint edges
                        double cosine = fabs(angle(approx[j % 4], approx[j - 2], approx[j - 1]));
                        maxCosine = MAX(maxCosine, cosine);
                    }
                    // if the cosines of all angles are small
                    // (all angles are ~90 degrees), record the quadrangle's
                    // vertices in the result
                    if (maxCosine < 0.3)
                        squares.push_back(approx);
                }
            }
        }
    }
}

// draws all the detected squares on the image
static void drawSquares(Mat &image, const vector<vector<Point> > &squares) {
    for (size_t i = 0; i < squares.size(); i++) {
        const Point *p = &squares[i][0];
        int n = (int) squares[i].size();
        polylines(image, &p, &n, 1, true, Scalar(0, 255, 0), 3, LINE_AA);
    }
}
#include "code/interpreter/pythonengine.h"
#include "gui/mainwindow.h"
int main(int argc, char *argv[]) {
    // Temporary webcam test: the normal Qt application start-up is disabled while the
    // OpenCV capture and square detection are being tried out.
    /*qRegisterMetaType<cv::Mat>();
    QApplication app(argc, argv);
    MainWindow *w = new MainWindow();
    w->show();
    return app.exec();*/
    using namespace cv;
    VideoCapture cap(0); // open the default camera
    if (!cap.isOpened()) {
        return 1;
    }
    //Mat edges;
    namedWindow("frame", WINDOW_AUTOSIZE);
    for (;;) {
        Mat frame;
        cap >> frame; // get a new frame from the camera
        if (frame.empty()) {
            break;
        }
        //frame *= 25;
        //cvtColor(frame, edges, COLOR_BGR2GRAY);
        //GaussianBlur(edges, edges, Size(7, 7), 1.5, 1.5);
        //Canny(edges, edges, 20, 100, 3);
        vector<vector<Point>> squares;
        findSquares(frame, squares);
        drawSquares(frame, squares);
        //std::cout << squares.size() << std::endl;
        imshow("frame", frame);
        if (waitKey(1) >= 0) {
            break; // quit on any key press
        }
    }
    // the camera will be deinitialized automatically in the VideoCapture destructor
    return 0;
}