Merge branch 'develop' into feature/MovementCharacter

SemvdH committed on 2021-06-11 10:14:12 +02:00 (committed by GitHub)
36 changed files with 6346 additions and 4698 deletions

.gitignore

@@ -2,6 +2,8 @@
# Created by https://www.toptal.com/developers/gitignore/api/c++,visualstudio,visualstudiocode,opencv
# Edit at https://www.toptal.com/developers/gitignore?templates=c++,visualstudio,visualstudiocode,opencv
res/**
### C++ ###
# Prerequisites
*.d

3 file diffs suppressed because they are too large.


@@ -1,5 +1,6 @@
#pragma once
#include <iostream>
#include <glm/gtc/matrix_transform.hpp>
#include "../entities/entity.h"
@@ -15,6 +16,18 @@ namespace collision
{
glm::vec3 center_pos;
glm::vec3 size;
void SetRotation(float angle)
{
double sinTheta = glm::sin(glm::radians(angle));
double cosTheta = glm::cos(glm::radians(angle));
float x = size.x * cosTheta + size.z * sinTheta;
float z = size.z * cosTheta - size.x * sinTheta;
size.x = x < 0 ? -x : x;
size.z = z < 0 ? -z : z;
}
};
/*

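A minimal sketch of how the rotation-aware box above might be used (the values are hypothetical; only the Box struct from this diff is assumed):

collision::Box box;
box.center_pos = glm::vec3(0.0f, 0.0f, 0.0f);
box.size = glm::vec3(2.0f, 1.0f, 4.0f);   // extents taken from a model
box.SetRotation(-90.0f);                  // re-fit the axis-aligned extents for a model rotated around Y
// box.size.x and box.size.z now hold the absolute extents of the rotated footprint; size.y is unchanged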

@@ -1,53 +0,0 @@
#include "FaceDetector.h"
/*
Author: Pierfrancesco Soffritti https://github.com/PierfrancescoSoffritti
*/
namespace computervision
{
Rect getFaceRect(Mat input);
String faceClassifierFileName = "res/haarcascade_frontalface_alt.xml";
CascadeClassifier faceCascadeClassifier;
FaceDetector::FaceDetector(void) {
if (!faceCascadeClassifier.load(faceClassifierFileName))
throw runtime_error("can't load file " + faceClassifierFileName);
}
void FaceDetector::removeFaces(Mat input, Mat output) {
vector<Rect> faces;
Mat frameGray;
cvtColor(input, frameGray, CV_BGR2GRAY);
equalizeHist(frameGray, frameGray);
faceCascadeClassifier.detectMultiScale(frameGray, faces, 1.1, 2, 0 | 2, Size(120, 120)); // HAAR_SCALE_IMAGE is 2
for (size_t i = 0; i < faces.size(); i++) {
rectangle(
output,
Point(faces[i].x, faces[i].y),
Point(faces[i].x + faces[i].width, faces[i].y + faces[i].height),
Scalar(0, 0, 0),
-1
);
}
}
Rect getFaceRect(Mat input) {
vector<Rect> faceRectangles;
Mat inputGray;
cvtColor(input, inputGray, CV_BGR2GRAY);
equalizeHist(inputGray, inputGray);
faceCascadeClassifier.detectMultiScale(inputGray, faceRectangles, 1.1, 2, 0 | 2, Size(120, 120)); // HAAR_SCALE_IMAGE is 2
if (faceRectangles.size() > 0)
return faceRectangles[0];
else
return Rect(0, 0, 1, 1);
}
}


@@ -1,31 +0,0 @@
#pragma once
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/types_c.h>
#include <opencv2/objdetect.hpp>
#include <opencv2/core.hpp>
#include <opencv2/objdetect/objdetect.hpp>
/*
Author: Pierfrancesco Soffritti https://github.com/PierfrancescoSoffritti
*/
using namespace cv;
using namespace std;
namespace computervision
{
class FaceDetector {
public:
/**
* @brief Constructor for the class FaceDetector, loads training data from a file
*
*/
FaceDetector(void);
/**
* @brief Detects faces on an image and blocks them with a black rectangle
*
* @param input Input image
* @param output Output image
*/
void removeFaces(Mat input, Mat output);
};
}


@@ -14,6 +14,7 @@
namespace computervision
{
FingerCount::FingerCount(void) {
color_blue = Scalar(255, 0, 0);
color_green = Scalar(0, 255, 0);
@@ -35,9 +36,6 @@ namespace computervision
if (input_image.channels() != 1)
return contours_image;
vector<vector<Point>> contours;
vector<Vec4i> hierarchy;
findContours(input_image, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
// we need at least one contour to work
@@ -45,7 +43,7 @@ namespace computervision
return contours_image;
// find the biggest contour (let's suppose it's our hand)
int biggest_contour_index = -1;
biggest_contour_index = -1;
double biggest_area = 0.0;
for (int i = 0; i < contours.size(); i++) {
@@ -156,6 +154,11 @@ namespace computervision
return contours_image;
}
void FingerCount::DrawHandContours(Mat& image)
{
drawContours(image, contours, biggest_contour_index, color_green, 2, 8, hierarchy);
}
int FingerCount::getAmountOfFingers()
{
return amount_of_fingers;


@@ -31,7 +31,15 @@ namespace computervision
*/
int getAmountOfFingers();
void DrawHandContours(Mat& image);
private:
int biggest_contour_index;
vector<vector<Point>> contours;
vector<Vec4i> hierarchy;
// colors to use
Scalar color_blue;
Scalar color_green;
@@ -115,5 +123,7 @@ namespace computervision
* @param with_numbers if the numbers should be drawn with the points
*/
void drawVectorPoints(Mat image, vector<Point> points, Scalar color, bool with_numbers);
};
}


@@ -0,0 +1,105 @@
#include "HandDetectRegion.h"
namespace computervision
{
HandDetectRegion::HandDetectRegion(std::string id,int x_pos, int y_pos, int width, int height)
{
region_id = id;
start_x_pos = x_pos;
start_y_pos = y_pos;
region_width = width;
region_height = height;
hand_mask_generated = false;
hand_present = false;
}
void HandDetectRegion::DetectHand(cv::Mat& camera_frame)
{
Mat input_frame = GenerateHandMaskSquare(camera_frame);
frame_out = input_frame.clone();
// detect skin color
skin_detector.drawSkinColorSampler(camera_frame,start_x_pos,start_y_pos,region_width,region_height);
// remove background from image
foreground = background_remover.getForeground(input_frame);
// detect the hand contours
handMask = skin_detector.getSkinMask(foreground);
// draw the hand rectangle on the camera input, and draw text showing if the hand is open or closed.
DrawHandMask(&camera_frame);
//imshow("output" + region_id, frame_out);
//imshow("foreground" + region_id, foreground);
//imshow("handMask" + region_id, handMask);
/*imshow("handDetection", fingerCountDebug);*/
hand_present = hand_calibrator.CheckIfHandPresent(handMask,handcalibration::HandDetectionType::GAME);
//std::string text = (hand_present ? "hand" : "no");
//cv::putText(camera_frame, text, cv::Point(start_x_pos, start_y_pos), cv::FONT_HERSHEY_COMPLEX, 2.0, cv::Scalar(0, 255, 255), 2);
hand_calibrator.SetHandPresent(hand_present);
//draw black rectangle behind calibration information text
cv::rectangle(camera_frame, cv::Rect(0, camera_frame.rows - 55, 450, camera_frame.cols), cv::Scalar(0, 0, 0), -1);
hand_calibrator.DrawBackgroundSkinCalibrated(camera_frame);
}
cv::Mat HandDetectRegion::GenerateHandMaskSquare(cv::Mat img)
{
cv::Mat mask = cv::Mat::zeros(img.size(), img.type());
cv::Mat distance_img = cv::Mat::zeros(img.size(), img.type());
cv::rectangle(mask, cv::Rect(start_x_pos, start_y_pos, region_width, region_height), cv::Scalar(255, 255, 255), -1);
img.copyTo(distance_img, mask);
hand_mask_generated = true;
return distance_img;
}
bool HandDetectRegion::DrawHandMask(cv::Mat* input)
{
if (!hand_mask_generated) return false;
rectangle(*input, Rect(start_x_pos, start_y_pos, region_width, region_height), (hand_present ? Scalar(0, 255, 0) : Scalar(0,0,255)),2);
return true;
}
bool HandDetectRegion::IsHandPresent()
{
return hand_present;
}
void HandDetectRegion::CalibrateBackground()
{
std::cout << "calibrating background " << region_id << std::endl;
background_remover.calibrate(frame_out);
hand_calibrator.SetBackGroundCalibrated(true);
}
void HandDetectRegion::CalibrateSkin()
{
skin_detector.calibrate(frame_out);
hand_calibrator.SetSkinCalibration(true);
}
std::vector<int> HandDetectRegion::CalculateSkinTresholds()
{
std::cout << "calibrating skin " << region_id << std::endl;
hand_calibrator.SetSkinCalibration(true);
return skin_detector.calibrateAndReturn(frame_out);
}
void HandDetectRegion::setSkinTresholds(std::vector<int>& tresholds)
{
std::cout << "setting skin " << region_id << std::endl;
skin_detector.setTresholds(tresholds);
hand_calibrator.SetSkinCalibration(true);
}
}


@@ -0,0 +1,56 @@
#pragma once
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "async/StaticCameraInstance.h"
#include "calibration/HandCalibrator.h"
#include "BackgroundRemover.h"
#include "SkinDetector.h"
#include "FingerCount.h"
namespace computervision
{
class HandDetectRegion
{
public:
HandDetectRegion(std::string id,int x_pos, int y_pos, int width, int height);
void SetXPos(int x) { start_x_pos = x; }
void SetYPos(int y) { start_y_pos = y; }
int GetXPos() { return start_x_pos; }
int GetYPos() { return start_y_pos; }
void SetWidth(int width) { region_width = width; }
void SetHeigth(int height) { region_height = height; }
int GetWidth() { return region_width; }
int GetHeight() { return region_height; }
cv::Mat GenerateHandMaskSquare(cv::Mat img);
void DetectHand(cv::Mat& camera_frame);
bool IsHandPresent();
void CalibrateBackground();
void CalibrateSkin();
std::vector<int> CalculateSkinTresholds();
void setSkinTresholds(std::vector<int>& tresholds);
private:
int start_x_pos;
int start_y_pos;
int region_height;
int region_width;
bool hand_mask_generated;
bool hand_present;
cv::Mat frame, frame_out, handMask, foreground, fingerCountDebug;
BackgroundRemover background_remover;
SkinDetector skin_detector;
handcalibration::HandCalibrator hand_calibrator;
std::string region_id;
bool DrawHandMask(cv::Mat* input);
};
}

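A minimal usage sketch for HandDetectRegion, assuming the shared capture from async/StaticCameraInstance.h and the same keyboard-driven calibration the in-game scene further below uses:

computervision::HandDetectRegion region("left", 10, 100, 150, 150);
cv::Mat camera_frame;
while (true)
{
    static_camera::getCap().read(camera_frame);
    region.DetectHand(camera_frame);          // draws the region rectangle and updates the hand state
    cv::imshow("camera", camera_frame);
    int key = cv::waitKey(1);
    if (key == 'b') region.CalibrateBackground();
    if (key == 's') region.CalibrateSkin();
    if (region.IsHandPresent()) { /* react to the detected hand */ }
}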

@@ -6,117 +6,142 @@
#include "ObjectDetection.h"
#include "BackgroundRemover.h"
#include "SkinDetector.h"
#include "FaceDetector.h"
#include "FingerCount.h"
#include "async/StaticCameraInstance.h"
#include "calibration/HandCalibrator.h"
namespace computervision
{
cv::VideoCapture cap(0);
cv::Mat img, imgGray, img2, img2Gray, img3, img4;
cv::Mat img, img_gray, img2, img2_gray, img3, img4;
int handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight;
bool handMaskGenerated = false;
int hand_mask_start_x_pos, hand_mask_start_y_pos, hand_mask_width, hand_mask_height;
bool hand_mask_generated = false;
Mat frame, frameOut, handMask, foreground, fingerCountDebug;
BackgroundRemover backgroundRemover;
SkinDetector skinDetector;
FaceDetector faceDetector;
FingerCount fingerCount;
Mat frame, frame_out, handMask, foreground, fingerCountDebug;
BackgroundRemover background_remover;
SkinDetector skin_detector;
FingerCount finger_count;
handcalibration::HandCalibrator hand_calibrator;
cv::VideoCapture cap = static_camera::getCap();
ObjectDetection::ObjectDetection()
{
}
cv::Mat ObjectDetection::readCamera() {
cv::Mat ObjectDetection::ReadCamera() {
cap.read(img);
return img;
}
bool ObjectDetection::detectHand(Mat cameraFrame)
cv::VideoCapture ObjectDetection::GetCap()
{
Mat inputFrame = generateHandMaskSquare(cameraFrame);
frameOut = inputFrame.clone();
return cap;
}
bool ObjectDetection::DetectHand(Mat camera_frame, bool& hand_present)
{
Mat input_frame = GenerateHandMaskSquare(camera_frame);
frame_out = input_frame.clone();
// detect skin color
skinDetector.drawSkinColorSampler(frameOut);
skin_detector.drawSkinColorSampler(camera_frame);
// remove background from image
foreground = backgroundRemover.getForeground(inputFrame);
foreground = background_remover.getForeground(input_frame);
// detect the hand contours
handMask = skinDetector.getSkinMask(foreground);
handMask = skin_detector.getSkinMask(foreground);
// count the amount of fingers and put the info on the matrix
fingerCountDebug = fingerCount.findFingersCount(handMask, frameOut);
fingerCountDebug = finger_count.findFingersCount(handMask, frame_out);
// get the amount of fingers
int fingers_amount = fingerCount.getAmountOfFingers();
int fingers_amount = finger_count.getAmountOfFingers();
// draw the hand rectangle on the camera input, and draw text showing if the hand is open or closed.
drawHandMaskRect(&cameraFrame);
string hand_text = fingers_amount > 0 ? "open" : "closed";
putText(cameraFrame,hand_text, Point(10, 75), FONT_HERSHEY_PLAIN, 2.0, Scalar(255, 0, 255),3);
imshow("camera", cameraFrame);
DrawHandMask(&camera_frame);
/* imshow("output", frameOut);
hand_calibrator.SetAmountOfFingers(fingers_amount);
finger_count.DrawHandContours(camera_frame);
hand_calibrator.DrawHandCalibrationText(camera_frame);
imshow("camera", camera_frame);
/*imshow("output", frame_out);
imshow("foreground", foreground);
imshow("handMask", handMask);
imshow("handDetection", fingerCountDebug);*/
hand_present = hand_calibrator.CheckIfHandPresent(handMask,handcalibration::HandDetectionType::MENU);
hand_calibrator.SetHandPresent(hand_present);
int key = waitKey(1);
if (key == 98) // b, calibrate the background
backgroundRemover.calibrate(inputFrame);
{
background_remover.calibrate(input_frame);
hand_calibrator.SetBackGroundCalibrated(true);
}
else if (key == 115) // s, calibrate the skin color
skinDetector.calibrate(inputFrame);
{
skin_detector.calibrate(input_frame);
hand_calibrator.SetSkinCalibration(true);
}
return fingers_amount > 0;
}
void ObjectDetection::calculateDifference()
void ObjectDetection::CalculateDifference()
{
cap.read(img);
cap.read(img2);
cv::cvtColor(img, imgGray, cv::COLOR_RGBA2GRAY);
cv::cvtColor(img2, img2Gray, cv::COLOR_RGBA2GRAY);
cv::cvtColor(img, img_gray, cv::COLOR_RGBA2GRAY);
cv::cvtColor(img2, img2_gray, cv::COLOR_RGBA2GRAY);
cv::absdiff(imgGray, img2Gray, img3);
cv::absdiff(img_gray, img2_gray, img3);
cv::threshold(img3, img4, 50, 170, cv::THRESH_BINARY);
imshow("threshold", img4);
}
cv::Mat ObjectDetection::generateHandMaskSquare(cv::Mat img)
cv::Mat ObjectDetection::GenerateHandMaskSquare(cv::Mat img)
{
handMaskStartXPos = 20;
handMaskStartYPos = img.rows / 5;
handMaskWidth = img.cols / 3;
handMaskHeight = img.cols / 3;
hand_mask_start_x_pos = 20;
hand_mask_start_y_pos = img.rows / 5;
hand_mask_width = img.cols / 3;
hand_mask_height = img.cols / 3;
cv::Mat mask = cv::Mat::zeros(img.size(), img.type());
cv::Mat dstImg = cv::Mat::zeros(img.size(), img.type());
cv::Mat distance_img = cv::Mat::zeros(img.size(), img.type());
cv::rectangle(mask, Rect(handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight), Scalar(255, 255, 255), -1);
cv::rectangle(mask, Rect(hand_mask_start_x_pos, hand_mask_start_y_pos, hand_mask_width, hand_mask_height), Scalar(255, 255, 255), -1);
img.copyTo(dstImg, mask);
img.copyTo(distance_img, mask);
handMaskGenerated = true;
return dstImg;
hand_mask_generated = true;
return distance_img;
}
bool ObjectDetection::drawHandMaskRect(cv::Mat* input)
bool ObjectDetection::DrawHandMask(cv::Mat* input)
{
if (!handMaskGenerated) return false;
rectangle(*input, Rect(handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight), Scalar(255, 255, 255));
if (!hand_mask_generated) return false;
rectangle(*input, Rect(hand_mask_start_x_pos, hand_mask_start_y_pos, hand_mask_width, hand_mask_height), Scalar(255, 255, 255));
return true;
}
void ObjectDetection::showWebcam()
void ObjectDetection::ShowWebcam()
{
imshow("Webcam image", img);
}


@@ -27,13 +27,13 @@ namespace computervision
* @brief Displays an image of the current webcam-footage
*
*/
void showWebcam();
void ShowWebcam();
/**
* @brief Calculates the difference between two images
* and outputs an image that only shows the difference
*
*/
void calculateDifference();
void CalculateDifference();
/**
* @brief generates the square that will hold the mask in which the hand will be detected.
@@ -41,29 +41,51 @@ namespace computervision
* @param img the current camera frame
* @return a matrix containing the mask
*/
cv::Mat generateHandMaskSquare(cv::Mat img);
cv::Mat GenerateHandMaskSquare(cv::Mat img);
/**
* @brief reads the camera and returns it in a matrix.
*
* @return the camera frame in a matrix
*/
cv::Mat readCamera();
cv::Mat ReadCamera();
/**
* @brief detects a hand based on the given hand mask input frame.
*
* @param inputFrame the input frame from the camera
* @param hand_present boolean that will hold true if the hand is detected, false if not.
* @return true if hand is open, false if hand is closed
*/
bool detectHand(cv::Mat cameraFrame);
bool DetectHand(cv::Mat camera_frame, bool& hand_present);
/**
* @brief draws the hand mask rectangle on the given input matrix.
*
* @param input the input matrix to draw the rectangle on
*/
bool drawHandMaskRect(cv::Mat *input);
bool DrawHandMask(cv::Mat *input);
/**
* @brief checks if the hand of the user is open.
*
* @return true if the hand is open, false if not.
*/
bool IsHandOpen();
/**
* @brief checks whether the hand is held within the detection square.
*
* @return true if the hand is in the detection square, false if not.
*/
bool IsHandPresent();
cv::VideoCapture GetCap();
private:
bool is_hand_open;
bool is_hand_present;
};

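A possible driving loop for the reworked ObjectDetection API (the loop and the ESC exit key are assumptions, not taken from the commit):

computervision::ObjectDetection detector;
bool hand_present = false;
while (true)
{
    cv::Mat frame = detector.ReadCamera();
    bool hand_open = detector.DetectHand(frame, hand_present);  // also handles the 'b'/'s' calibration keys
    // hand_present: hand inside the detection square; hand_open: at least one finger counted
    if (cv::waitKey(1) == 27) break;  // hypothetical: stop on ESC
}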

@@ -0,0 +1,108 @@
#include "OpenPoseVideo.h"
using namespace std;
using namespace cv;
using namespace cv::dnn;
namespace computervision
{
#define MPI
#ifdef MPI
const int POSE_PAIRS[7][2] =
{
{0,1}, {1,2}, {2,3},
{3,4}, {1,5}, {5,6},
{6,7}
};
string protoFile = "res/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt";
string weightsFile = "res/pose/mpi/pose_iter_160000.caffemodel";
int nPoints = 8;
#endif
#ifdef COCO
const int POSE_PAIRS[17][2] =
{
{1,2}, {1,5}, {2,3},
{3,4}, {5,6}, {6,7},
{1,8}, {8,9}, {9,10},
{1,11}, {11,12}, {12,13},
{1,0}, {0,14},
{14,16}, {0,15}, {15,17}
};
string protoFile = "pose/coco/pose_deploy_linevec.prototxt";
string weightsFile = "pose/coco/pose_iter_440000.caffemodel";
int nPoints = 18;
#endif
Net net;
void OpenPoseVideo::setup() {
net = readNetFromCaffe(protoFile, weightsFile);
net.setPreferableBackend(DNN_TARGET_CPU);
}
void OpenPoseVideo::movementSkeleton(Mat& inputImage, std::function<void(std::vector<Point>&, cv::Mat& poinst_on_image)> f) {
std::cout << "movement skeleton start" << std::endl;
int inWidth = 368;
int inHeight = 368;
float thresh = 0.01;
Mat frame;
int frameWidth = inputImage.size().width;
int frameHeight = inputImage.size().height;
double t = (double)cv::getTickCount();
std::cout << "reading input image and blob" << std::endl;
frame = inputImage;
Mat inpBlob = blobFromImage(frame, 1.0 / 255, Size(inWidth, inHeight), Scalar(0, 0, 0), false, false);
std::cout << "done reading image and blob" << std::endl;
net.setInput(inpBlob);
std::cout << "done setting input to net" << std::endl;
Mat output = net.forward();
t = ((double)cv::getTickCount() - t) / cv::getTickFrequency(); // convert the tick delta into elapsed seconds
std::cout << "time taken to set input and forward: " << t << std::endl;
int H = output.size[2];
int W = output.size[3];
std::cout << "about to find position of body parts" << std::endl;
// find the position of the body parts
vector<Point> points(nPoints);
for (int n = 0; n < nPoints; n++)
{
// Probability map of corresponding body's part.
Mat probMap(H, W, CV_32F, output.ptr(0, n));
Point2f p(-1, -1);
Point maxLoc;
double prob;
minMaxLoc(probMap, 0, &prob, 0, &maxLoc);
if (prob > thresh)
{
p = maxLoc;
p.x *= (float)frameWidth / W;
p.y *= (float)frameHeight / H;
circle(frame, cv::Point((int)p.x, (int)p.y), 8, Scalar(0, 255, 255), -1);
cv::putText(frame, cv::format("%d", n), cv::Point((int)p.x, (int)p.y), cv::FONT_HERSHEY_COMPLEX, 1.1, cv::Scalar(0, 0, 255), 2);
}
points[n] = p;
}
cv::putText(frame, cv::format("time taken = %.2f sec", t), cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, .8, cv::Scalar(255, 50, 0), 2);
std::cout << "time taken: " << t << std::endl;
//imshow("Output-Keypoints", frame);
//imshow("Output-Skeleton", frame);
std::cout << "about to call points receiving method" << std::endl;
f(points,frame);
}
}


@@ -0,0 +1,19 @@
#pragma once
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace cv;
namespace computervision
{
class OpenPoseVideo{
private:
public:
void movementSkeleton(Mat& inputImage, std::function<void(std::vector<Point>&, cv::Mat& poinst_on_image)> f);
void setup();
};
}

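One way OpenPoseVideo could be driven for a single frame with a callback, assuming the shared capture from async/StaticCameraInstance.h and the model files listed above:

computervision::OpenPoseVideo pose;
pose.setup();                                   // loads the MPI prototxt/caffemodel paths listed above
cv::Mat frame;
static_camera::getCap().read(frame);
pose.movementSkeleton(frame, [](std::vector<cv::Point>& points, cv::Mat& points_on_image) {
    // points[n] is (-1, -1) when keypoint n stayed below the confidence threshold
    cv::imshow("Output-Keypoints", points_on_image);
});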

@@ -1,4 +1,5 @@
#include "SkinDetector.h"
#include <iostream>
/*
Author: Pierfrancesco Soffritti https://github.com/PierfrancescoSoffritti
@@ -23,7 +24,7 @@ namespace computervision
int frameWidth = input.size().width, frameHeight = input.size().height;
int rectangleSize = 25;
Scalar rectangleColor = Scalar(255, 0, 255);
Scalar rectangleColor = Scalar(0, 255, 255);
skinColorSamplerRectangle1 = Rect(frameWidth / 5, frameHeight / 2, rectangleSize, rectangleSize);
skinColorSamplerRectangle2 = Rect(frameWidth / 5, frameHeight / 3, rectangleSize, rectangleSize);
@@ -41,6 +42,29 @@ namespace computervision
);
}
void SkinDetector::drawSkinColorSampler(Mat input,int x, int y,int width, int height) {
int frameWidth = width, frameHeight = height;
int rectangleSize = 25;
Scalar rectangleColor = Scalar(0, 255, 255);
skinColorSamplerRectangle1 = Rect(frameWidth / 5 + x, frameHeight / 2 + y, rectangleSize, rectangleSize);
skinColorSamplerRectangle2 = Rect(frameWidth / 5 + x, frameHeight / 3 + y, rectangleSize, rectangleSize);
rectangle(
input,
skinColorSamplerRectangle1,
rectangleColor
);
rectangle(
input,
skinColorSamplerRectangle2,
rectangleColor
);
}
void SkinDetector::calibrate(Mat input) {
Mat hsvInput;
@@ -54,6 +78,19 @@ namespace computervision
calibrated = true;
}
std::vector<int> SkinDetector::calibrateAndReturn(Mat input)
{
Mat hsvInput;
cvtColor(input, hsvInput, CV_BGR2HSV);
Mat sample1 = Mat(hsvInput, skinColorSamplerRectangle1);
Mat sample2 = Mat(hsvInput, skinColorSamplerRectangle2);
calibrated = true;
return calculateAndReturnTresholds(sample1, sample2);
}
void SkinDetector::calculateThresholds(Mat sample1, Mat sample2) {
int offsetLowThreshold = 80;
int offsetHighThreshold = 30;
@@ -75,6 +112,39 @@ namespace computervision
//vHighThreshold = 255;
}
std::vector<int> SkinDetector::calculateAndReturnTresholds(Mat sample1, Mat sample2)
{
calculateThresholds(sample1, sample2);
std::vector<int> res;
res.push_back(hLowThreshold);
res.push_back(hHighThreshold);
res.push_back(sLowThreshold);
res.push_back(sHighThreshold);
res.push_back(vLowThreshold);
res.push_back(vHighThreshold);
return res;
}
void SkinDetector::setTresholds(std::vector<int>& tresholds)
{
if (tresholds.size() != 6)
{
std::cout << "tresholds array not the right size!" << std::endl;
return;
}
hLowThreshold = tresholds[0];
hHighThreshold = tresholds[1];
sLowThreshold = tresholds[2];
sHighThreshold = tresholds[3];
vLowThreshold = tresholds[4];
vHighThreshold = tresholds[5];
calibrated = true;
}
Mat SkinDetector::getSkinMask(Mat input) {
Mat skinMask;


@@ -24,6 +24,9 @@ namespace computervision
*/
void drawSkinColorSampler(Mat input);
void drawSkinColorSampler(Mat input, int x, int y, int width, int heigth);
/*
* @brief calibrates the skin color detector with the given input frame
*
@@ -31,6 +34,10 @@ namespace computervision
*/
void calibrate(Mat input);
std::vector<int> calibrateAndReturn(Mat input);
void setTresholds(std::vector<int>& tresholds);
/*
* @brief gets the mask for the hand
*
@@ -63,6 +70,8 @@ namespace computervision
*/
void calculateThresholds(Mat sample1, Mat sample2);
std::vector<int> calculateAndReturnTresholds(Mat sample1, Mat sample2);
/**
* @brief the opening. it generates the structuring element and performs the morphological transformations required to detect the hand.
* This needs to be done to get the skin mask.

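A sketch of sharing one skin calibration between two detectors through the new threshold getters/setters; the zero-filled frame is a hypothetical stand-in for a real camera image:

computervision::SkinDetector left_detector, right_detector;
cv::Mat frame = cv::Mat::zeros(480, 640, CV_8UC3);                       // hypothetical camera frame
left_detector.drawSkinColorSampler(frame);                               // places the sampler rectangles
std::vector<int> thresholds = left_detector.calibrateAndReturn(frame);   // H/S/V low and high thresholds
right_detector.setTresholds(thresholds);                                 // reuse the same calibration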

@@ -0,0 +1,12 @@
#pragma once
#include <opencv2/videoio.hpp>
namespace static_camera
{
static cv::VideoCapture getCap()
{
static cv::VideoCapture cap(0);
return cap;
}
};


@@ -0,0 +1,46 @@
#include <iostream>
#include "async_arm_detection.h"
#include "../OpenPoseVideo.h"
#include <thread>
#include "StaticCameraInstance.h"
namespace computervision
{
AsyncArmDetection::AsyncArmDetection()
{
}
void AsyncArmDetection::run_arm_detection(std::function<void(std::vector<Point>, cv::Mat poinst_on_image)> points_ready_func, OpenPoseVideo op)
{
VideoCapture cap = static_camera::getCap();
std::cout << "STARTING THREAD LAMBDA" << std::endl;
/*cv::VideoCapture cap = static_camera::GetCap();*/
if (!cap.isOpened())
{
std::cout << "capture was closed, opening..." << std::endl;
cap.open(0);
}
while (true)
{
Mat img;
cap.read(img);
op.movementSkeleton(img, points_ready_func);
}
}
void AsyncArmDetection::start(std::function<void(std::vector<Point>, cv::Mat poinst_on_image)> points_ready_func, OpenPoseVideo op)
{
std::cout << "starting function" << std::endl;
std::thread async_arm_detect_thread(&AsyncArmDetection::run_arm_detection,this, points_ready_func, op);
async_arm_detect_thread.detach(); // makes sure the thread is detached from the variable.
}
}


@@ -0,0 +1,23 @@
#pragma once
#include <vector>
#include <opencv2/core/types.hpp>
#include <opencv2/videoio.hpp>
#include <functional>
#include "../OpenPoseVideo.h"
#include "StaticCameraInstance.h"
namespace computervision
{
class AsyncArmDetection
{
public:
AsyncArmDetection(void);
void start(std::function<void(std::vector<cv::Point>, cv::Mat poinst_on_image)>, computervision::OpenPoseVideo op);
private:
void run_arm_detection(std::function<void(std::vector<Point>, cv::Mat poinst_on_image)> points_ready_func, OpenPoseVideo op);
};
}

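A sketch of starting the detached arm-detection thread; the retrieve_points callback added to main.cpp below fills the same role as this lambda:

computervision::OpenPoseVideo pose;
pose.setup();
computervision::AsyncArmDetection arm_detection;
arm_detection.start([](std::vector<cv::Point> points, cv::Mat points_on_image) {
    // runs on the detached worker thread once per processed camera frame
}, pose);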

@@ -0,0 +1,92 @@
#include "HandCalibrator.h"
#include <iostream>
#define MIN_MENU_HAND_SIZE 10000
#define MIN_GAME_HAND_SIZE 3000 // todo change
namespace computervision
{
namespace handcalibration
{
HandCalibrator::HandCalibrator()
{
}
void HandCalibrator::DrawHandCalibrationText(cv::Mat& output_frame)
{
cv::rectangle(output_frame, cv::Rect(0, 0, output_frame.cols, 40), cv::Scalar(0, 0, 0), -1);
cv::putText(output_frame, "Hand calibration", cv::Point(output_frame.cols / 2 - 100, 25), cv::FONT_HERSHEY_PLAIN, 2.0, cv::Scalar(18, 219, 65), 2);
cv::putText(output_frame, "press 'b' to calibrate background, then press 's' to calibrate skin tone", cv::Point(5, 35), cv::FONT_HERSHEY_PLAIN, 1.0, cv::Scalar(18, 219, 65), 1);
cv::rectangle(output_frame, cv::Rect(0, output_frame.rows - 80, 450, output_frame.cols), cv::Scalar(0, 0, 0), -1);
cv::putText(output_frame, "hand in frame:", cv::Point(5, output_frame.rows - 50), cv::FONT_HERSHEY_PLAIN, 2.0, cv::Scalar(255, 255, 0), 1);
cv::rectangle(output_frame, cv::Rect(420, output_frame.rows - 67, 15, 15), hand_present ? cv::Scalar(0, 255, 0) : cv::Scalar(0, 0, 255), -1);
DrawBackgroundSkinCalibrated(output_frame);
if (hand_present)
{
std::string hand_text = fingers_amount > 0 ? "open" : "closed";
cv::putText(output_frame, hand_text, cv::Point(10, 75), cv::FONT_HERSHEY_PLAIN, 2.0, cv::Scalar(255, 0, 255), 3);
}
}
void HandCalibrator::DrawBackgroundSkinCalibrated(cv::Mat& output_frame)
{
cv::putText(output_frame, "background calibrated:", cv::Point(5, output_frame.rows - 30), cv::FONT_HERSHEY_PLAIN, 2.0, cv::Scalar(255, 255, 0), 1);
cv::rectangle(output_frame, cv::Rect(420, output_frame.rows - 47, 15, 15), background_calibrated ? cv::Scalar(0, 255, 0) : cv::Scalar(0, 0, 255), -1);
cv::putText(output_frame, "skin color calibrated:", cv::Point(5, output_frame.rows - 10), cv::FONT_HERSHEY_PLAIN, 2.0, cv::Scalar(255, 255, 0), 1);
cv::rectangle(output_frame, cv::Rect(420, output_frame.rows - 27, 15, 15), skintone_calibrated ? cv::Scalar(0, 255, 0) : cv::Scalar(0, 0, 255), -1);
}
void HandCalibrator::SetSkinCalibration(bool val)
{
skintone_calibrated = val;
}
void HandCalibrator::SetBackGroundCalibrated(bool val)
{
background_calibrated = val;
}
void HandCalibrator::SetHandPresent(bool val)
{
hand_present = val;
}
void HandCalibrator::SetAmountOfFingers(int amount)
{
fingers_amount = amount;
}
bool HandCalibrator::CheckIfHandPresent(cv::Mat input_image, HandDetectionType type)
{
std::vector<std::vector<cv::Point>> points;
cv::findContours(input_image, points, cv::RetrievalModes::RETR_LIST, cv::ContourApproximationModes::CHAIN_APPROX_SIMPLE);
if (points.size() == 0) return false;
for (int p = 0; p < points.size(); p++)
{
int area = cv::contourArea(points[p]);
if (type == handcalibration::HandDetectionType::MENU)
if (area > MIN_MENU_HAND_SIZE) return true;
if (type == handcalibration::HandDetectionType::GAME)
if (area > MIN_GAME_HAND_SIZE) return true;
}
return false;
}
}
}


@@ -0,0 +1,76 @@
#pragma once
#include <opencv2/core/base.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc.hpp>
namespace computervision
{
namespace handcalibration
{
enum class HandDetectionType
{
MENU,
GAME
};
class HandCalibrator
{
public:
HandCalibrator();
/**
* @brief draws the text to show the status of the calibration on the image
*
* @param output_frame the frame to draw on.
*/
void DrawHandCalibrationText(cv::Mat& output_frame);
/**
* @brief sets the skin calibration variable.
*
* @param val the value to set
*/
void SetSkinCalibration(bool val);
/**
* @brief sets the background calibration variable.
*
* @param val the value to set
*/
void SetBackGroundCalibrated(bool val);
/**
* @brief sets the value for if the hand is present.
*
* @param val the value to set.
*/
void SetHandPresent(bool val);
/**
* @brief checks if the hand is present in the given image
*
* @param input_image the input image to check.
*/
bool CheckIfHandPresent(cv::Mat input_image, HandDetectionType type);
/**
* @brief sets the amount of fingers that are currently detected.
*
* @param amount the amount of fingers.
*/
void SetAmountOfFingers(int amount);
void DrawBackgroundSkinCalibrated(cv::Mat& output_frame);
private:
bool background_calibrated;
bool skintone_calibrated;
bool hand_present;
int fingers_amount;
};
}
}

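A sketch of the calibrator's intended flow; camera_frame and hand_mask are hypothetical stand-ins for the camera image and the binary skin mask produced elsewhere:

computervision::handcalibration::HandCalibrator calibrator;
cv::Mat camera_frame = cv::Mat::zeros(480, 640, CV_8UC3);   // hypothetical camera image
cv::Mat hand_mask = cv::Mat::zeros(480, 640, CV_8UC1);      // hypothetical skin mask (e.g. from SkinDetector::getSkinMask)
bool present = calibrator.CheckIfHandPresent(hand_mask, computervision::handcalibration::HandDetectionType::MENU);
calibrator.SetHandPresent(present);
calibrator.DrawHandCalibrationText(camera_frame);           // overlays calibration status and the open/closed text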

@@ -0,0 +1,258 @@
#include "house_generator.h"
#include <functional>
#include <iostream>
#include "../renderEngine/obj_loader.h"
#include "../renderEngine/Loader.h"
#include "../toolbox/toolbox.h"
#include "collision_entity.h"
namespace entities
{
HouseGenerator::HouseGenerator()
{
models::RawModel raw_model = render_engine::LoadObjModel("res/HouseNew.obj");
default_texture = { render_engine::loader::LoadTexture("res/Texture.png") };
default_texture.shine_damper = 10;
house_model = { raw_model, default_texture };
GenerateFurnitureModels();
}
std::deque<std::shared_ptr<Entity>> HouseGenerator::GenerateHouse(const glm::vec3& position, float y_rotation)
{
std::deque<std::shared_ptr<Entity>> furniture;
// Add house
furniture.push_front(std::make_shared<Entity>(house_model, position, glm::vec3(0, y_rotation, 0), HOUSE_SIZE));
for(int i = 0; i<toolbox::Random(1,4);i++)
{
FurnitureType type = FurnitureType(toolbox::Random(0, furniture_models.size() - 1));
models::TexturedModel model = GetFurnitureModel(type);
glm::vec3 model_pos = glm::vec3(position.x, position.y, position.z);
collision::Box model_box = { model_pos, model.raw_model.model_size };
model_box.SetRotation(-90);
furniture.push_back(std::make_shared<CollisionEntity>(model, model_pos, glm::vec3(0, -90, 0), HOUSE_SIZE * 2, model_box));
}
/*
// Add furniture
models::TexturedModel couch = GetFurnitureModel(FurnitureType::COUCH);
glm::vec3 couch_pos = glm::vec3(position.x + 200, position.y, position.z + 10);
collision::Box couch_box = { couch_pos, couch.raw_model.model_size };
couch_box.SetRotation(-90);
furniture.push_back(std::make_shared<CollisionEntity>(couch, couch_pos, glm::vec3(0, -90, 0), HOUSE_SIZE * 2, couch_box));
models::TexturedModel table = GetFurnitureModel(FurnitureType::TABLE);
glm::vec3 table_pos = glm::vec3(position.x - 30, position.y, position.z);
collision::Box table_box = { table_pos, table.raw_model.model_size };
furniture.push_back(std::make_shared<CollisionEntity>(table, table_pos, glm::vec3(0, 0, 0), HOUSE_SIZE * 1.3, table_box));
models::TexturedModel chair = GetFurnitureModel(FurnitureType::CHAIR);
glm::vec3 chair_pos = glm::vec3(position.x - 50, position.y, position.z + 220);
collision::Box chair_box = { chair_pos, chair.raw_model.model_size };
furniture.push_back(std::make_shared<CollisionEntity>(chair, chair_pos, glm::vec3(0, 0, 0), HOUSE_SIZE, chair_box));
models::TexturedModel plant = GetFurnitureModel(FurnitureType::PLANT);
glm::vec3 plant_pos = glm::vec3(position.x - 50, position.y, position.z + 220);
collision::Box plant_box = { plant_pos, plant.raw_model.model_size };
furniture.push_back(std::make_shared<CollisionEntity>(plant, plant_pos, glm::vec3(0, 0, 0), HOUSE_SIZE, plant_box));
models::TexturedModel guitar = GetFurnitureModel(FurnitureType::GUITAR);
glm::vec3 guitar_pos = glm::vec3(position.x - 50, position.y, position.z + 220);
collision::Box guitar_box = { guitar_pos, guitar.raw_model.model_size };
furniture.push_back(std::make_shared<CollisionEntity>(guitar, guitar_pos, glm::vec3(0, 0, 0), HOUSE_SIZE, guitar_box));
models::TexturedModel bookshelf = GetFurnitureModel(FurnitureType::BOOKSHELF);
glm::vec3 bookshelf_pos = glm::vec3(position.x - 50, position.y, position.z + 220);
collision::Box bookshelf_box = { bookshelf_pos, bookshelf.raw_model.model_size };
furniture.push_back(std::make_shared<CollisionEntity>(bookshelf, bookshelf_pos, glm::vec3(0, 0, 0), HOUSE_SIZE, bookshelf_box));
models::TexturedModel lamp = GetFurnitureModel(FurnitureType::LAMP);
glm::vec3 lamp_pos = glm::vec3(position.x - 50, position.y, position.z + 220);
collision::Box lamp_box = { lamp_pos, lamp.raw_model.model_size };
furniture.push_back(std::make_shared<CollisionEntity>(lamp, lamp_pos, glm::vec3(0, 0, 0), HOUSE_SIZE, lamp_box));
models::TexturedModel ceiling_object = GetFurnitureModel(FurnitureType::CEILING_OBJECTS);
glm::vec3 ceiling_object_pos = glm::vec3(position.x - 50, position.y, position.z + 220);
collision::Box ceiling_object_box = { ceiling_object_pos, ceiling_object.raw_model.model_size };
furniture.push_back(std::make_shared<CollisionEntity>(ceiling_object, ceiling_object_pos, glm::vec3(0, 0, 0), HOUSE_SIZE, ceiling_object_box));
models::TexturedModel misc = GetFurnitureModel(FurnitureType::MISC);
glm::vec3 misc_pos = glm::vec3(position.x - 50, position.y, position.z + 220);
collision::Box misc_box = { misc_pos, misc.raw_model.model_size };
furniture.push_back(std::make_shared<CollisionEntity>(misc, misc_pos, glm::vec3(0, 0, 0), HOUSE_SIZE, misc_box));
*/
return furniture;
}
models::TexturedModel HouseGenerator::GetFurnitureModel(FurnitureType furniture)
{
const auto found = furniture_models.find(furniture);
if (found == furniture_models.end())
{
std::cerr << "OH NEEEEEEEEEEEEEEE";
}
auto models = found->second;
const int modelNumber = toolbox::Random(0, models.size() - 1);
return models[modelNumber];
}
void HouseGenerator::GenerateFurnitureModels()
{
// Couches
std::deque<models::TexturedModel> couches;
models::RawModel couch_inside_model = render_engine::LoadObjModel("res/couchThree.obj");
models::TexturedModel couch_inside = { couch_inside_model, default_texture };
couches.push_back(couch_inside);
models::RawModel couch_inside_model2 = render_engine::LoadObjModel("res/Coach.obj");
models::TexturedModel couch_inside2 = { couch_inside_model2, default_texture };
couches.push_back(couch_inside2);
models::RawModel couch_inside_model3 = render_engine::LoadObjModel("res/lawnBenchOne.obj");
models::TexturedModel couch_inside3 = { couch_inside_model3, default_texture };
couches.push_back(couch_inside3);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::COUCH, couches));
// Tables
std::deque<models::TexturedModel> tables;
models::RawModel table_model1 = render_engine::LoadObjModel("res/tableOne.obj");
models::TexturedModel table1 = { table_model1, default_texture };
tables.push_back(table1);
models::RawModel table_model2 = render_engine::LoadObjModel("res/tableTwo.obj");
models::TexturedModel table2 = { table_model2, default_texture };
tables.push_back(table2);
models::RawModel table_model3 = render_engine::LoadObjModel("res/bureauOne.obj");
models::TexturedModel table3 = { table_model3, default_texture };
tables.push_back(table3);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::TABLE, tables));
// Chairs
std::deque<models::TexturedModel> chairs;
models::RawModel chair_model1 = render_engine::LoadObjModel("res/launchchair.obj");
models::TexturedModel chair1 = { chair_model1, default_texture };
chairs.push_back(chair1);
models::RawModel chair_model2 = render_engine::LoadObjModel("res/lawnChairOne.obj");
models::TexturedModel chair2 = { chair_model2, default_texture };
chairs.push_back(chair2);
models::RawModel chair_model3 = render_engine::LoadObjModel("res/ugly_chair.obj");
models::TexturedModel chair3 = { chair_model3, default_texture };
chairs.push_back(chair3);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::CHAIR, chairs));
// Plants
std::deque<models::TexturedModel> plants;
models::RawModel plant_model1 = render_engine::LoadObjModel("res/plantOne.obj");
models::TexturedModel plant1 = { plant_model1, default_texture };
plants.push_back(plant1);
models::RawModel plant_model2 = render_engine::LoadObjModel("res/plantTwo.obj");
models::TexturedModel plant2 = { plant_model2, default_texture };
plants.push_back(plant2);
models::RawModel plant_model3 = render_engine::LoadObjModel("res/plantThree.obj");
models::TexturedModel plant3 = { plant_model3, default_texture };
plants.push_back(plant3);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::PLANT, plants));
// Guitars
std::deque<models::TexturedModel> guitars;
models::RawModel guitar_model1 = render_engine::LoadObjModel("res/guitarOne.obj");
models::TexturedModel guitar1 = { guitar_model1, default_texture };
guitars.push_back(guitar1);
models::RawModel guitar_model2 = render_engine::LoadObjModel("res/guitarTwo.obj");
models::TexturedModel guitar2 = { guitar_model2, default_texture };
guitars.push_back(guitar2);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::GUITAR, guitars));
// Bookshelves
std::deque<models::TexturedModel> bookshelves;
models::RawModel bookshelf_model1 = render_engine::LoadObjModel("res/bookShelfOne.obj");
models::TexturedModel bookshelf1 = { bookshelf_model1, default_texture };
bookshelves.push_back(bookshelf1);
models::RawModel bookshelf_model2 = render_engine::LoadObjModel("res/bookShelfTwo.obj");
models::TexturedModel bookshelf2 = { bookshelf_model2, default_texture };
bookshelves.push_back(bookshelf2);
models::RawModel bookshelf_model3 = render_engine::LoadObjModel("res/bookShelfThree.obj");
models::TexturedModel bookshelf3 = { bookshelf_model3, default_texture };
bookshelves.push_back(bookshelf3);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::BOOKSHELF, bookshelves));
// Lamps
std::deque<models::TexturedModel>lamps;
models::RawModel lamp_model1 = render_engine::LoadObjModel("res/lampOne.obj");
models::TexturedModel lamp1 = { lamp_model1, default_texture };
lamps.push_back(lamp1);
models::RawModel lamp_model2 = render_engine::LoadObjModel("res/lampTwo.obj");
models::TexturedModel lamp2 = { lamp_model2, default_texture };
lamps.push_back(lamp2);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::LAMP, lamps));
// Ceiling objects
std::deque<models::TexturedModel>ceiling_Objects;
models::RawModel ceiling_Obj_model1 = render_engine::LoadObjModel("res/ceilingFan.obj");
models::TexturedModel ceiling_Obj1 = { ceiling_Obj_model1, default_texture };
ceiling_Objects.push_back(ceiling_Obj1);
models::RawModel ceiling_Obj_model2 = render_engine::LoadObjModel("res/ceilingFanTwo.obj");
models::TexturedModel ceiling_Obj2 = { ceiling_Obj_model2, default_texture };
ceiling_Objects.push_back(ceiling_Obj2);
models::RawModel ceiling_Obj_model3 = render_engine::LoadObjModel("res/ceilingLampOne.obj");
models::TexturedModel ceiling_Obj3 = { ceiling_Obj_model3, default_texture };
ceiling_Objects.push_back(ceiling_Obj3);
models::RawModel ceiling_Obj_model4 = render_engine::LoadObjModel("res/ceilingLampTwo.obj");
models::TexturedModel ceiling_Obj4 = { ceiling_Obj_model4, default_texture };
ceiling_Objects.push_back(ceiling_Obj4);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::CEILING_OBJECTS, ceiling_Objects));
// Miscs
std::deque<models::TexturedModel> miscs;
models::RawModel misc_model1 = render_engine::LoadObjModel("res/tv.obj");
models::TexturedModel misc1 = { misc_model1, default_texture };
miscs.push_back(misc1);
models::RawModel misc_model2 = render_engine::LoadObjModel("res/radio.obj");
models::TexturedModel misc2 = { misc_model2, default_texture };
miscs.push_back(misc2);
models::RawModel misc_model3 = render_engine::LoadObjModel("res/Flowerpot.obj");
models::TexturedModel misc3 = { misc_model3, default_texture };
miscs.push_back(misc3);
furniture_models.insert(std::pair<FurnitureType, std::deque<models::TexturedModel>>(FurnitureType::MISC, miscs));
}
}


@@ -0,0 +1,67 @@
#pragma once
#include <deque>
#include <memory>
#include <map>
#include "../models/Model.h"
#include "../collision/collision.h"
namespace entities
{
enum class FurnitureType
{
COUCH,
TABLE,
CHAIR,
PLANT,
GUITAR,
BOOKSHELF,
LAMP,
CEILING_OBJECTS,
MISC
};
class HouseGenerator
{
private:
const float HOUSE_SIZE = 30;
models::TexturedModel house_model;
models::ModelTexture default_texture;
std::map<FurnitureType, std::deque<models::TexturedModel>> furniture_models;
public:
HouseGenerator();
/*
* @brief: This function generates a house with furniture at the given position and rotation
*
* @param position: The position of the house to render
* @param y_rotation: The y rotation the house needs to be rendered with
*
* @return: A list with all the entities of the generated house (the furniture)
*/
std::deque<std::shared_ptr<Entity>> GenerateHouse(const glm::vec3& position, float y_rotation);
/*
* @brief: Returns the depth of the house (chunk)
*/
float GetHouseDepth() const { return house_model.raw_model.model_size.x * HOUSE_SIZE; }
private:
/*
* @brief: This function loads all the 3D furniture models
*/
void GenerateFurnitureModels();
/*
* @brief: This function chooses and returns a random furniture model of the given furniture type
*
* @param furniture: The furniture you want to get
*
* @return: The model of the random furniture of the chosen furniture type
*/
models::TexturedModel GetFurnitureModel(FurnitureType furniture);
};
}

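A sketch of generating one house chunk; the position and rotation mirror the values the in-game scene below passes in:

entities::HouseGenerator house_generator;
std::deque<std::shared_ptr<entities::Entity>> chunk =
    house_generator.GenerateHouse(glm::vec3(0, -75, -50), 90.0f);   // the house plus a random selection of furniture
float chunk_depth = house_generator.GetHouseDepth();                // z-distance covered by one house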

@@ -1,6 +1,8 @@
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/gtc/matrix_transform.hpp>
#include <functional>
#include <vector>
#define STB_IMAGE_IMPLEMENTATION
#include <iostream>
#include <map>
@@ -12,6 +14,7 @@
#include <opencv2/videoio.hpp>
#include <opencv2/video.hpp>
#include "collision/collision.h"
#include "gui/gui_interactable.h"
#include "models/model.h"
#include "renderEngine/loader.h"
@@ -24,6 +27,10 @@
#include "scenes/startup_Scene.h"
#include "computervision/ObjectDetection.h"
//#include "computervision/OpenPoseImage.h"
#include "computervision/OpenPoseVideo.h"
#include "computervision/async/async_arm_detection.h"
#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32s.lib")
@@ -31,9 +38,21 @@
static double UpdateDelta();
static GLFWwindow* window;
scene::Scene* current_scene;
static GLFWwindow* window;
bool points_img_available = false;
cv::Mat points_img;
void retrieve_points(std::vector<Point> arm_points, cv::Mat points_on_image)
{
std::cout << "got points!!" << std::endl;
std::cout << "points: " << arm_points << std::endl;
points_img = points_on_image;
points_img_available = true;
}
int main(void)
{
#pragma region OPENGL_SETTINGS


@@ -1,6 +1,7 @@
#pragma once
#include <GL/glew.h>
#include <glm/gtc/matrix_transform.hpp>
namespace models
{


@@ -4,7 +4,6 @@
#include "loader.h"
#include "../toolbox/toolbox.h"
#include "renderer.h"
#include <iostream>
namespace render_engine
@@ -51,12 +50,12 @@ namespace render_engine
/*
This function will Render a Model on the screen.
*/
void Render(entities::Entity& entity, shaders::EntityShader& shader)
void Render(std::shared_ptr<entities::Entity> entity, shaders::EntityShader& shader)
{
const models::TexturedModel model = entity.GetModel();
const models::TexturedModel model = entity.get()->GetModel();
const models::RawModel raw_model = model.raw_model;
const models::ModelTexture texture = model.texture;
// Enable the model (VAO)
glBindVertexArray(raw_model.vao_id);
@@ -66,7 +65,7 @@ namespace render_engine
glEnableVertexAttribArray(2);
// Load the transformation of the model into the shader
const glm::mat4 modelMatrix = toolbox::CreateModelMatrix(entity.GetPosition(), entity.GetRotation(), entity.GetScale());
const glm::mat4 modelMatrix = toolbox::CreateModelMatrix(entity.get()->GetPosition(), entity.get()->GetRotation(), entity.get()->GetScale());
shader.LoadModelMatrix(modelMatrix);
shader.LoadShineVariables(texture.shine_damper, texture.reflectivity);


@@ -1,5 +1,6 @@
#pragma once
#include <memory>
#include "../gui/gui_element.h"
#include "../entities/entity.h"
#include "../shaders/entity_shader.h"
@@ -30,7 +31,7 @@ namespace render_engine
@param entity: The entity which needs to be rendered
@param shader: The shader the entity needs to be rendered with
*/
void Render(entities::Entity& entity, shaders::EntityShader& shader);
void Render(std::shared_ptr<entities::Entity> entity, shaders::EntityShader& shader);
/*
@brief: Call this function to render gui_textures on the screen


@@ -13,30 +13,38 @@
#include "../renderEngine/renderer.h"
#include "../shaders/entity_shader.h"
#include "../toolbox/toolbox.h"
#include "../entities/house_generator.h"
#include <deque>
#include <functional>
#include <memory>
#include <queue>
#include <opencv2/core/base.hpp>
#include "../computervision/HandDetectRegion.h"
#include "../computervision/ObjectDetection.h"
#define MAX_MODEL_DEQUE_SIZE 6 // maximum number of models to load at the same time
#define UPCOMING_MODEL_AMOUNT 4 // how many models should be loaded in front of us
namespace scene
{
std::deque<entities::Entity> house_models;
std::shared_ptr<entities::MainCharacter>main_character;
std::deque<entities::Light> lights;
std::deque<entities::CollisionEntity> trees;
std::vector<std::shared_ptr<entities::CollisionEntity>> collision_entities;
entities::HouseGenerator* house_generator;
std::deque<std::shared_ptr<entities::Entity>> house_models;
models::RawModel raw_model, raw_model_char;
models::ModelTexture texture;
shaders::EntityShader* shader;
shaders::GuiShader* gui_shader;
entities::Camera camera(glm::vec3(0, 0, 0), glm::vec3(0, 0, 0));
entities::Camera camera(glm::vec3(0, -50, 0), glm::vec3(0, 0, 0));
std::vector<gui::GuiTexture*> guis;
models::TexturedModel model;
models::TexturedModel tree;
std::vector<computervision::HandDetectRegion> regions;
computervision::HandDetectRegion reg_left("left", 0, 0, 150, 150), reg_right("right", 0, 0, 150, 150), reg_up("up", 0, 0, 150, 150);
In_Game_Scene::In_Game_Scene()
{
@@ -60,6 +68,11 @@ namespace scene
return box;
}
In_Game_Scene::~In_Game_Scene()
{
delete house_generator;
}
/**
* @brief loads a new chunk in front of the camera, and deletes the chunk behind the camera.
*
@@ -68,48 +81,48 @@ namespace scene
*/
void load_chunk(int model_pos)
{
std::cout << "loading model chunk" << std::endl;
if (house_models.size() >= MAX_MODEL_DEQUE_SIZE)
{
house_models.pop_back();
trees.pop_back();
collision_entities.erase(collision_entities.begin() + 1);
}
int z_offset = model_pos * (model.raw_model.model_size.x * 20); // how much "in the distance" we should load the model
house_models.push_front(entities::Entity(model, glm::vec3(0, -50, -50 - z_offset), glm::vec3(0, 90, 0), 20));
collision::Box tree_box = create_bounding_box(tree.raw_model.model_size, glm::vec3(0, 0, -50 - z_offset),3);
std::shared_ptr<entities::CollisionEntity> tree_entity = std::make_shared<entities::CollisionEntity>(tree, glm::vec3(0, 0, -50 - z_offset), glm::vec3(0, 90, 0), 3, tree_box);
trees.push_front(*tree_entity);
collision_entities.push_back(tree_entity);
//std::cout << collision_entities.size() << std::endl;
/*if (collision_entities.size() > 0) {
std::cout << collision_entities[0].get()->GetPosition().z << std::endl;
std::cout << "x: " << main_character->GetPosition().x << "\ny: " << main_character->GetPosition().y << "\nz: " << main_character->GetPosition().z << "\n";
}*/
static unsigned int furniture_count = 0;
// set up squares according to size of camera input
cv::Mat camera_frame;
static_camera::getCap().read(camera_frame); // get a camera frame to know the width and height
reg_left.SetXPos(10);
reg_left.SetYPos(camera_frame.rows / 2 - reg_left.GetHeight()/2);
reg_right.SetXPos(camera_frame.cols - 10 - reg_right.GetWidth());
reg_right.SetYPos(camera_frame.rows / 2 - reg_right.GetHeight()/2);
reg_up.SetXPos(camera_frame.cols / 2 - reg_up.GetWidth() / 2);
reg_up.SetYPos(10);
std::cout << "loading model chunk" << std::endl;
if (house_models.size() >= MAX_MODEL_DEQUE_SIZE * furniture_count)
{
for (int i = 0; i < furniture_count; i++)
{
house_models.pop_front();
}
}
int z_offset = model_pos * (house_generator->GetHouseDepth()); // how much "in the distance" we should load the model
std::deque<std::shared_ptr<entities::Entity>> furniture = house_generator->GenerateHouse(glm::vec3(0, -75, -50 - z_offset), 90);
furniture_count = furniture.size();
house_models.insert(house_models.end(), furniture.begin(), furniture.end());
}
scene::Scenes scene::In_Game_Scene::start(GLFWwindow* window)
{
raw_model = render_engine::LoadObjModel("res/House.obj");
texture = { render_engine::loader::LoadTexture("res/Texture.png") };
texture.shine_damper = 10;
texture.reflectivity = 0;
model = { raw_model, texture };
models::RawModel raw_tree_model = render_engine::LoadObjModel("res/Tree.obj");
models::ModelTexture tree_texture = { render_engine::loader::LoadTexture("res/TreeTexture.png") };
tree = { raw_tree_model, tree_texture };
raw_model_char = render_engine::LoadObjModel("res/beeTwo.obj");
models::TexturedModel model_char = { raw_model_char, texture };
collision::Box char_box = create_bounding_box(raw_model_char.model_size, glm::vec3(0, 0, 0), 1);
main_character = std::make_shared<entities::MainCharacter>(model_char, glm::vec3(0, -50, -100), glm::vec3(0, 90, 0), 5, char_box);
collision_entities.push_back(main_character);
house_generator = new entities::HouseGenerator();
// load the first few house models
for (int i = 0; i <= UPCOMING_MODEL_AMOUNT; i++)
{
@@ -156,15 +169,10 @@ namespace scene
shader->LoadLightsDeque(lights);
shader->LoadViewMatrix(camera);
for (entities::Entity& model_entity : house_models)
for (std::shared_ptr<entities::Entity> model_entity : house_models)
{
render_engine::renderer::Render(model_entity, *shader);
}
for (entities::Entity& tree_entity : trees)
{
render_engine::renderer::Render(tree_entity, *shader);
}
render_engine::renderer::Render(*main_character, *shader);
@@ -186,9 +194,7 @@ namespace scene
// calculate where the next house model should be loaded
static int last_model_pos = 0;
int model_pos = -round(camera.GetPosition().z / (model.raw_model.model_size.x * 20)); // how many models we have passed; negative because we are moving along the negative z axis
//std::cout << collision_entities.size() << std::endl;
int model_pos = -round(camera.GetPosition().z / (house_generator->GetHouseDepth())); // how many models we have passed; negative because we are moving along the negative z axis
// if we have passed a model, load a new one and delete the one behind us
if (last_model_pos != model_pos)
@@ -198,14 +204,40 @@ namespace scene
// remember the position at which the new model was added
last_model_pos = model_pos;
collision::CheckCollisions(collision_entities);
update_hand_detection();
}
void scene::In_Game_Scene::onKey(GLFWwindow* window, int key, int scancode, int action, int mods)
{
if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
{
cv::destroyWindow("camera");
return_value = scene::Scenes::STOP;
}
if (glfwGetKey(window, GLFW_KEY_B) == GLFW_PRESS)
{
reg_left.CalibrateBackground();
reg_right.CalibrateBackground();
reg_up.CalibrateBackground();
}
if (glfwGetKey(window, GLFW_KEY_S) == GLFW_PRESS)
{
std::vector<int> tresholds = reg_left.CalculateSkinTresholds();
reg_right.setSkinTresholds(tresholds);
reg_up.setSkinTresholds(tresholds);
}
}
void scene::In_Game_Scene::update_hand_detection()
{
cv::Mat camera_frame;
static_camera::getCap().read(camera_frame);
reg_left.DetectHand(camera_frame);
reg_right.DetectHand(camera_frame);
reg_up.DetectHand(camera_frame);
cv::imshow("camera", camera_frame);
}
}


@@ -8,9 +8,11 @@ namespace scene
{
private:
scene::Scenes return_value = scene::Scenes::INGAME;
void update_hand_detection();
public:
In_Game_Scene();
~In_Game_Scene();
Scenes start(GLFWwindow* window) override;
void render() override;


@@ -2,10 +2,13 @@
#include <GLFW/glfw3.h>
#include <map>
#include "startup_Scene.h"
#include "../computervision/ObjectDetection.h"
#include "../computervision/HandDetectRegion.h"
#include <iostream>
namespace scene
{
computervision::ObjectDetection objDetect;
scene::Scenes scene::Startup_Scene::start(GLFWwindow *window)
{
while (return_value == scene::Scenes::STARTUP)
@@ -27,7 +30,8 @@ namespace scene
void scene::Startup_Scene::update(GLFWwindow* window)
{
bool hand_present;
objDetect.DetectHand(objDetect.ReadCamera(),hand_present);
}
void scene::Startup_Scene::onKey(GLFWwindow* window, int key, int scancode, int action, int mods)
@@ -35,6 +39,7 @@ namespace scene
if (glfwGetKey(window, GLFW_KEY_SPACE) == GLFW_PRESS)
{
return_value = scene::Scenes::INGAME;
cv::destroyWindow("camera");
}
}
}


@@ -29,7 +29,7 @@ namespace shaders
uniform vec3 light_position[4];
const float density = 0.0017;
const float gradient = 4;
const float gradient = 3;
void main(void)
{


@@ -1,3 +1,4 @@
#include <ctime>
#include "toolbox.h"
namespace toolbox
@@ -43,5 +44,16 @@ namespace toolbox
final.y = Lerp(from.y, to.y, amount);
final.z = Lerp(from.z, to.z, amount);
return final;
}
int Random(const int min, const int max)
{
static bool first = true;
if (first)
{
srand(time(0));
first = false;
}
return min + rand() % ((max + 1) - min);
}
}


@@ -68,4 +68,14 @@ namespace toolbox
* @return position of where to go
*/
glm::vec3 Lerp(glm::vec3 from, glm::vec3 to, float amount);
/*
* @brief: This function will return a random value between min and max (inclusive)
*
* @param min: The min value
* @param max: The max value
*
* @return: The random number
*/
int Random(const int min, const int max);
}

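A small example of the inclusive bounds of Random:

int index = toolbox::Random(0, 3);   // yields 0, 1, 2 or 3; both bounds are inclusive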

@@ -21,9 +21,13 @@
<ItemGroup>
<ClCompile Include="src\collision\collision_handler.cpp" />
<ClCompile Include="src\entities\main_character.cpp" />
<ClCompile Include="src\entities\house_generator.cpp" />
<ClCompile Include="src\computervision\calibration\HandCalibrator.cpp" />
<ClCompile Include="src\computervision\HandDetectRegion.cpp" />
<ClCompile Include="src\scenes\in_Game_Scene.cpp" />
<ClCompile Include="src\computervision\FaceDetector.cpp" />
<ClCompile Include="src\computervision\async\async_arm_detection.cpp" />
<ClCompile Include="src\computervision\ObjectDetection.cpp" />
<ClCompile Include="src\computervision\OpenPoseVideo.cpp" />
<ClCompile Include="src\computervision\SkinDetector.cpp" />
<ClCompile Include="src\computervision\FingerCount.cpp" />
<ClCompile Include="src\computervision\BackgroundRemover.cpp" />
@@ -45,11 +49,17 @@
<ClInclude Include="src\collision\collision.h" />
<ClInclude Include="src\collision\collision_handler.h" />
<ClInclude Include="src\entities\main_character.h" />
<ClInclude Include="src\entities\house_generator.h" />
<ClInclude Include="src\computervision\calibration\HandCalibrator.h" />
<ClInclude Include="src\computervision\calibration\StaticSkinTreshold.h" />
<ClInclude Include="src\computervision\HandDetectRegion.h" />
<ClInclude Include="src\scenes\in_Game_Scene.h" />
<ClInclude Include="src\scenes\scene.h" />
<ClInclude Include="src\computervision\FaceDetector.h" />
<ClInclude Include="src\computervision\async\async_arm_detection.h" />
<ClInclude Include="src\computervision\async\StaticCameraInstance.h" />
<ClInclude Include="src\computervision\FingerCount.h" />
<ClInclude Include="src\computervision\BackgroundRemover.h" />
<ClInclude Include="src\computervision\OpenPoseVideo.h" />
<ClInclude Include="src\computervision\SkinDetector.h" />
<ClInclude Include="src\computervision\ObjectDetection.h" />
<ClInclude Include="src\entities\camera.h" />
@@ -73,6 +83,12 @@
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\Avans Hogeschool\Kim Veldhoen - Proftaak 2.4\pose_iter_160000.caffemodel" />
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
<PropertyGroup Label="Globals">
<VCProjectVersion>16.0</VCProjectVersion>
<ProjectGuid>{A7ECF1BE-DB22-4BF7-BFF6-E3BF72691EE6}</ProjectGuid>
@@ -141,6 +157,8 @@
<LinkIncremental>false</LinkIncremental>
<IncludePath>$(VC_IncludePath);$(WindowsSDK_IncludePath);;C:\opencv\opencv\build\include;C:\opencv\build\include</IncludePath>
<LibraryPath>$(VC_LibraryPath_x64);$(WindowsSDK_LibraryPath_x64);C:\opencv\opencv\build\x64\vc15\lib;C:\opencv\build\x64\vc15\lib</LibraryPath>
<IncludePath>C:\opencv\build\include\;$(VC_IncludePath);$(WindowsSDK_IncludePath);C:\opencv\opencv\build\include</IncludePath>
<LibraryPath>C:\opencv\build\x64\vc15\lib;$(VC_LibraryPath_x64);$(WindowsSDK_LibraryPath_x64);C:\opencv\opencv\build\x64\vc15\lib</LibraryPath>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
@@ -214,6 +232,7 @@
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalLibraryDirectories>$(SolutionDir)lib\glfw-3.3.2\$(Platform);$(SolutionDir)lib\glew-2.1.0\lib\Release\$(Platform);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<AdditionalDependencies>kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies); opencv_world452.lib;opencv_world452d.lib</AdditionalDependencies>
<AdditionalDependencies>opencv_world452.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />


@@ -1,18 +1,29 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Source Files">
<UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
<Extensions>cpp;c;cc;cxx;c++;def;odl;idl;hpj;bat;asm;asmx</Extensions>
</Filter>
<Filter Include="Header Files">
<UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
<Extensions>h;hh;hpp;hxx;h++;hm;inl;inc;ipp;xsd</Extensions>
</Filter>
<Filter Include="Resource Files">
<UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
<Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
</Filter>
<ClCompile Include="src\collision\collision_handler.cpp" />
<ClCompile Include="src\scenes\in_Game_Scene.cpp" />
<ClCompile Include="src\computervision\async\async_arm_detection.cpp" />
<ClCompile Include="src\computervision\ObjectDetection.cpp" />
<ClCompile Include="src\computervision\OpenPoseVideo.cpp" />
<ClCompile Include="src\computervision\SkinDetector.cpp" />
<ClCompile Include="src\computervision\FingerCount.cpp" />
<ClCompile Include="src\computervision\BackgroundRemover.cpp" />
<ClCompile Include="src\entities\camera.cpp" />
<ClCompile Include="src\entities\collision_entity.cpp" />
<ClCompile Include="src\entities\entity.cpp" />
<ClCompile Include="src\gui\gui_interactable.cpp" />
<ClCompile Include="src\main.cpp" />
<ClCompile Include="src\renderEngine\loader.cpp" />
<ClCompile Include="src\renderEngine\obj_loader.cpp" />
<ClCompile Include="src\renderEngine\renderer.cpp" />
<ClCompile Include="src\shaders\gui_shader.cpp" />
<ClCompile Include="src\shaders\shader_program.cpp" />
<ClCompile Include="src\shaders\entity_shader.cpp" />
<ClCompile Include="src\toolbox\toolbox.cpp" />
<ClCompile Include="src\scenes\startup_Scene.cpp" />
<ClCompile Include="src\computervision\calibration\HandCalibrator.cpp" />
<ClCompile Include="src\computervision\HandDetectRegion.cpp" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="src\entities\Camera.cpp">
@@ -75,6 +86,9 @@
<ClCompile Include="src\computervision\BackgroundRemover.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\entities\house_generator.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="src\entities\Camera.h">
@@ -155,8 +169,48 @@
<ClInclude Include="src\toolbox\Timer.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\entities\house_generator.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\collision\collision.h" />
<ClInclude Include="src\collision\collision_handler.h" />
<ClInclude Include="src\scenes\in_Game_Scene.h" />
<ClInclude Include="src\scenes\scene.h" />
<ClInclude Include="src\computervision\async\async_arm_detection.h" />
<ClInclude Include="src\computervision\async\StaticCameraInstance.h" />
<ClInclude Include="src\computervision\FingerCount.h" />
<ClInclude Include="src\computervision\BackgroundRemover.h" />
<ClInclude Include="src\computervision\OpenPoseVideo.h" />
<ClInclude Include="src\computervision\SkinDetector.h" />
<ClInclude Include="src\computervision\ObjectDetection.h" />
<ClInclude Include="src\entities\camera.h" />
<ClInclude Include="src\entities\collision_entity.h" />
<ClInclude Include="src\entities\entity.h" />
<ClInclude Include="src\entities\light.h" />
<ClInclude Include="src\gui\gui_element.h" />
<ClInclude Include="src\gui\gui_interactable.h" />
<ClInclude Include="src\models\model.h" />
<ClInclude Include="src\renderEngine\loader.h" />
<ClInclude Include="src\renderEngine\obj_loader.h" />
<ClInclude Include="src\renderEngine\renderer.h" />
<ClInclude Include="src\shaders\gui_shader.h" />
<ClInclude Include="src\shaders\shader_program.h" />
<ClInclude Include="src\shaders\entity_shader.h" />
<ClInclude Include="src\stb_image.h" />
<ClInclude Include="src\toolbox\Timer.h" />
<ClInclude Include="src\toolbox\toolbox.h" />
<ClInclude Include="src\scenes\startup_Scene.h" />
<ClInclude Include="src\computervision\calibration\HandCalibrator.h" />
<ClInclude Include="src\computervision\HandDetectRegion.h" />
<ClInclude Include="src\computervision\calibration\StaticSkinTreshold.h" />
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\Avans Hogeschool\Kim Veldhoen - Proftaak 2.4\pose_iter_160000.caffemodel" />
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
</Project>