13 Commits

Author SHA1 Message Date
Sem van der Hoeven
921609de5d [EDIT] stuff 2021-06-04 11:39:23 +02:00
Sem van der Hoeven
fe94b0f83d [FIX] showing of pose detection points 2021-06-04 10:55:53 +02:00
Sem van der Hoeven
ab30c41bee [FIX] crashing with pose detection 2021-06-02 10:41:50 +02:00
Sem van der Hoeven
1a149b8b7e [ADD] static camera instance 2021-06-02 10:05:09 +02:00
Sem van der Hoeven
cc7cb37840 [ADD] caffemodel project entry 2021-06-02 09:44:30 +02:00
Sem van der Hoeven
40529f84b3 [ADD] basis for async arm detection 2021-05-28 15:31:21 +02:00
Jasper
a68c6a57bf [EDIT] edited file 2021-05-28 12:32:10 +02:00
Jasper
078a6ce66d [ADD] added all the files 2021-05-28 12:27:12 +02:00
Sem van der Hoeven
f1f1aac93d [ADD] comments 2021-05-25 15:54:02 +02:00
Sem van der Hoeven
563f465e2c [EDIT] remove unused methods 2021-05-25 15:46:53 +02:00
Sem van der Hoeven
05ae8ee019 [FEATURE] finished hand open/closed recognition 2021-05-25 14:49:04 +02:00
Sem van der Hoeven
3696e2eb30 [EDIT] improve hand detection with mask 2021-05-25 14:19:18 +02:00
Sem van der Hoeven
276aa1a449 [ADD] mask methods 2021-05-25 13:31:25 +02:00
25 changed files with 5496 additions and 236 deletions

2
.gitignore vendored
View File

@@ -428,4 +428,6 @@ FodyWeavers.xsd
**/docs/*
**/doc/*
**/pose_iter_160000.caffemodel
# End of https://www.toptal.com/developers/gitignore/api/c++,visualstudio,visualstudiocode,opencv

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

@@ -151,9 +151,16 @@ namespace computervision
drawVectorPoints(frame, filtered_finger_points, color_yellow, false);
putText(frame, to_string(filtered_finger_points.size()), center_bounding_rect, FONT_HERSHEY_PLAIN, 3, color_purple);
amount_of_fingers = filtered_finger_points.size();
return contours_image;
}
int FingerCount::getAmountOfFingers()
{
return amount_of_fingers;
}
double FingerCount::findPointsDistance(Point a, Point b) {
Point difference = a - b;
return sqrt(difference.ddot(difference));

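As an aside, the distance helper shown above squares the difference vector via ddot and takes the root. A minimal standalone sketch of the same computation (illustrative example, not repository code; assumes OpenCV is available):

    #include <opencv2/core.hpp>
    #include <cmath>
    #include <iostream>

    // Euclidean distance between two points, mirroring FingerCount::findPointsDistance
    static double pointsDistance(cv::Point a, cv::Point b)
    {
        cv::Point difference = a - b;                   // component-wise difference
        return std::sqrt(difference.ddot(difference));  // ddot: dot product in double precision
    }

    int main()
    {
        std::cout << pointsDistance(cv::Point(0, 0), cv::Point(3, 4)) << std::endl; // prints 5
        return 0;
    }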
View File

@@ -24,6 +24,13 @@ namespace computervision
*/
Mat findFingersCount(Mat input_image, Mat frame);
/**
* @brief gets the currently held-up finger count.
*
* @return the currently held-up finger count
*/
int getAmountOfFingers();
private:
// colors to use
Scalar color_blue;
@@ -34,6 +41,8 @@ namespace computervision
Scalar color_yellow;
Scalar color_purple;
int amount_of_fingers;
/**
* @brief finds the distance between 2 points.
*

View File

@@ -1,47 +1,70 @@
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video.hpp>
#include "ObjectDetection.h"
#include "BackgroundRemover.h"
#include "SkinDetector.h"
#include "FaceDetector.h"
#include "FingerCount.h"
#include "async/StaticCameraInstance.h"
namespace computervision
{
cv::VideoCapture cap(0);
cv::Mat img, imgGray, img2, img2Gray, img3, img4;
int handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight;
bool handMaskGenerated = false;
Mat frame, frameOut, handMask, foreground, fingerCountDebug;
BackgroundRemover backgroundRemover;
SkinDetector skinDetector;
FaceDetector faceDetector;
FingerCount fingerCount;
cv::VideoCapture cap = static_camera::getCap();
ObjectDetection::ObjectDetection()
{
}
bool ObjectDetection::setup()
cv::Mat ObjectDetection::readCamera() {
cap.read(img);
return img;
}
cv::VideoCapture ObjectDetection::getCap()
{
if (!cap.isOpened()) {
cout << "Can't find camera!" << endl;
return false;
}
return cap;
}
cap.read(frame);
frameOut = frame.clone();
bool ObjectDetection::detectHand(Mat cameraFrame)
{
Mat inputFrame = generateHandMaskSquare(cameraFrame);
frameOut = inputFrame.clone();
// detect skin color
skinDetector.drawSkinColorSampler(frameOut);
foreground = backgroundRemover.getForeground(frame);
// remove background from image
foreground = backgroundRemover.getForeground(inputFrame);
faceDetector.removeFaces(frame, foreground);
// detect the hand contours
handMask = skinDetector.getSkinMask(foreground);
// count the amount of fingers and put the info on the matrix
fingerCountDebug = fingerCount.findFingersCount(handMask, frameOut);
//backgroundRemover.calibrate(frame);
// get the amount of fingers
int fingers_amount = fingerCount.getAmountOfFingers();
// draw the hand rectangle on the camera input, and draw text showing if the hand is open or closed.
drawHandMaskRect(&cameraFrame);
string hand_text = fingers_amount > 0 ? "open" : "closed";
putText(cameraFrame,hand_text, Point(10, 75), FONT_HERSHEY_PLAIN, 2.0, Scalar(255, 0, 255),3);
imshow("camera", cameraFrame);
imshow("output", frameOut);
imshow("foreground", foreground);
@@ -50,12 +73,12 @@ namespace computervision
int key = waitKey(1);
if (key == 98) // b
backgroundRemover.calibrate(frame);
else if (key == 115) // s
skinDetector.calibrate(frame);
if (key == 98) // b, calibrate the background
backgroundRemover.calibrate(inputFrame);
else if (key == 115) // s, calibrate the skin color
skinDetector.calibrate(inputFrame);
return true;
return fingers_amount > 0;
}
void ObjectDetection::calculateDifference()
@@ -72,14 +95,32 @@ namespace computervision
imshow("threshold", img4);
}
void ObjectDetection::detect()
{
int key = waitKey(1);
if (key == 98) // b
backgroundRemover.calibrate(frame);
else if (key == 115) // s
skinDetector.calibrate(frame);
cv::Mat ObjectDetection::generateHandMaskSquare(cv::Mat img)
{
handMaskStartXPos = 20;
handMaskStartYPos = img.rows / 5;
handMaskWidth = img.cols / 3;
handMaskHeight = img.cols / 3;
cv::Mat mask = cv::Mat::zeros(img.size(), img.type());
cv::Mat dstImg = cv::Mat::zeros(img.size(), img.type());
cv::rectangle(mask, Rect(handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight), Scalar(255, 255, 255), -1);
img.copyTo(dstImg, mask);
handMaskGenerated = true;
return dstImg;
}
bool ObjectDetection::drawHandMaskRect(cv::Mat* input)
{
if (!handMaskGenerated) return false;
rectangle(*input, Rect(handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight), Scalar(255, 255, 255));
return true;
}
void ObjectDetection::showWebcam()

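A hedged sketch of how the reworked ObjectDetection API above could be driven by a caller. The class name and the readCamera/detectHand methods come from this diff; the surrounding loop, the exit key and the printed labels are illustrative assumptions:

    #include <iostream>
    #include <opencv2/highgui.hpp>
    #include "computervision/ObjectDetection.h"

    int main()
    {
        computervision::ObjectDetection objDetect;

        while (true)
        {
            // grab the current camera frame through the shared capture
            cv::Mat frame = objDetect.readCamera();
            if (frame.empty()) break;

            // detectHand masks the hand region, counts fingers, shows the debug
            // windows, and returns true when the hand is open (fingers_amount > 0)
            bool handOpen = objDetect.detectHand(frame);
            std::cout << (handOpen ? "open" : "closed") << std::endl;

            // 'b' and 's' recalibrate background and skin color inside detectHand;
            // ESC ends this illustrative loop
            if (cv::waitKey(1) == 27) break;
        }
        return 0;
    }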
View File

@@ -22,13 +22,7 @@ namespace computervision
*
*/
ObjectDetection();
/**
* @brief Initializes the object detection, captures a frame and modifies it
* so it is ready to use for object detection
*
* @return return true if webcam is connected, returns false if it isn't
*/
bool setup();
/**
* @brief Displays an image of the current webcam-footage
*
@@ -40,11 +34,39 @@ namespace computervision
*
*/
void calculateDifference();
/**
* @brief Listens for keypresses and handles them
* @brief generates the square that will hold the mask in which the hand will be detected.
*
* @param img the current camera frame
* @return a matrix containing the mask
*/
void detect();
cv::Mat generateHandMaskSquare(cv::Mat img);
/**
* @brief reads the camera and returns it in a matrix.
*
* @return the camera frame in a matrix
*/
cv::Mat readCamera();
/**
* @brief detects a hand based on the given hand mask input frame.
*
* @param inputFrame the input frame from the camera
* @return true if hand is open, false if hand is closed
*/
bool detectHand(cv::Mat cameraFrame);
/**
* @brief draws the hand mask rectangle on the given input matrix.
*
* @param input the input matrix to draw the rectangle on
*/
bool drawHandMaskRect(cv::Mat *input);
cv::VideoCapture getCap();
};

View File

@@ -0,0 +1,108 @@
#include "OpenPoseVideo.h"
using namespace std;
using namespace cv;
using namespace cv::dnn;
namespace computervision
{
#define MPI
#ifdef MPI
const int POSE_PAIRS[7][2] =
{
{0,1}, {1,2}, {2,3},
{3,4}, {1,5}, {5,6},
{6,7}
};
string protoFile = "res/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt";
string weightsFile = "res/pose/mpi/pose_iter_160000.caffemodel";
int nPoints = 8;
#endif
#ifdef COCO
const int POSE_PAIRS[17][2] =
{
{1,2}, {1,5}, {2,3},
{3,4}, {5,6}, {6,7},
{1,8}, {8,9}, {9,10},
{1,11}, {11,12}, {12,13},
{1,0}, {0,14},
{14,16}, {0,15}, {15,17}
};
string protoFile = "pose/coco/pose_deploy_linevec.prototxt";
string weightsFile = "pose/coco/pose_iter_440000.caffemodel";
int nPoints = 18;
#endif
Net net;
void OpenPoseVideo::setup() {
net = readNetFromCaffe(protoFile, weightsFile);
net.setPreferableBackend(DNN_TARGET_CPU);
}
void OpenPoseVideo::movementSkeleton(Mat& inputImage, std::function<void(std::vector<Point>&, cv::Mat& poinst_on_image)> f) {
std::cout << "movement skeleton start" << std::endl;
int inWidth = 368;
int inHeight = 368;
float thresh = 0.01;
Mat frame;
int frameWidth = inputImage.size().width;
int frameHeight = inputImage.size().height;
double t = (double)cv::getTickCount();
std::cout << "reading input image and blob" << std::endl;
frame = inputImage;
Mat inpBlob = blobFromImage(frame, 1.0 / 255, Size(inWidth, inHeight), Scalar(0, 0, 0), false, false);
std::cout << "done reading image and blob" << std::endl;
net.setInput(inpBlob);
std::cout << "done setting input to net" << std::endl;
Mat output = net.forward();
std::cout << "time took to set input and forward: " << t << std::endl;
int H = output.size[2];
int W = output.size[3];
std::cout << "about to find position of boxy parts" << std::endl;
// find the position of the body parts
vector<Point> points(nPoints);
for (int n = 0; n < nPoints; n++)
{
// Probability map of corresponding body's part.
Mat probMap(H, W, CV_32F, output.ptr(0, n));
Point2f p(-1, -1);
Point maxLoc;
double prob;
minMaxLoc(probMap, 0, &prob, 0, &maxLoc);
if (prob > thresh)
{
p = maxLoc;
p.x *= (float)frameWidth / W;
p.y *= (float)frameHeight / H;
circle(frame, cv::Point((int)p.x, (int)p.y), 8, Scalar(0, 255, 255), -1);
cv::putText(frame, cv::format("%d", n), cv::Point((int)p.x, (int)p.y), cv::FONT_HERSHEY_COMPLEX, 1.1, cv::Scalar(0, 0, 255), 2);
}
points[n] = p;
}
cv::putText(frame, cv::format("time taken = %.2f sec", t), cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, .8, cv::Scalar(255, 50, 0), 2);
std::cout << "time taken: " << t << std::endl;
//imshow("Output-Keypoints", frame);
//imshow("Output-Skeleton", frame);
std::cout << "about to call points receiving method" << std::endl;
f(points,frame);
}
}

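Note that the time printed above is the raw value of getTickCount() captured before the forward pass. The usual OpenCV idiom divides a tick difference by getTickFrequency() to get seconds; a minimal sketch of that idiom (standalone example, variable names assumed):

    #include <opencv2/core.hpp>
    #include <iostream>

    int main()
    {
        double start = (double)cv::getTickCount();

        // ... work to be timed, e.g. net.setInput(...) and net.forward() ...

        // elapsed ticks divided by ticks per second gives seconds
        double seconds = ((double)cv::getTickCount() - start) / cv::getTickFrequency();
        std::cout << "time taken = " << seconds << " sec" << std::endl;
        return 0;
    }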
View File

@@ -0,0 +1,19 @@
#pragma once
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace cv;
namespace computervision
{
class OpenPoseVideo{
private:
public:
void movementSkeleton(Mat& inputImage, std::function<void(std::vector<Point>&, cv::Mat& poinst_on_image)> f);
void setup();
};
}

View File

@@ -22,7 +22,7 @@ namespace computervision
void SkinDetector::drawSkinColorSampler(Mat input) {
int frameWidth = input.size().width, frameHeight = input.size().height;
int rectangleSize = 20;
int rectangleSize = 25;
Scalar rectangleColor = Scalar(255, 0, 255);
skinColorSamplerRectangle1 = Rect(frameWidth / 5, frameHeight / 2, rectangleSize, rectangleSize);

View File

@@ -0,0 +1,12 @@
#pragma once
#include <opencv2/videoio.hpp>
namespace static_camera
{
static cv::VideoCapture getCap()
{
static cv::VideoCapture cap(0);
return cap;
}
};

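The header above hands out one camera through a function-local static, so every caller reaches the same device. A usage sketch (illustrative; it relies on cv::VideoCapture copies sharing the same underlying, reference-counted capture handle):

    #include <opencv2/highgui.hpp>
    #include <opencv2/videoio.hpp>
    #include "StaticCameraInstance.h"

    int main()
    {
        // both handles are copies of the single function-local static capture
        cv::VideoCapture a = static_camera::getCap();
        cv::VideoCapture b = static_camera::getCap();

        cv::Mat frame;
        a.read(frame);              // frames read through either handle come from
        if (!frame.empty())         // the one shared camera
            cv::imshow("shared camera", frame);
        cv::waitKey(0);
        return 0;
    }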
View File

@@ -0,0 +1,46 @@
#include <iostream>
#include "async_arm_detection.h"
#include "../OpenPoseVideo.h"
#include <thread>
#include "StaticCameraInstance.h"
namespace computervision
{
AsyncArmDetection::AsyncArmDetection()
{
}
void AsyncArmDetection::run_arm_detection(std::function<void(std::vector<Point>, cv::Mat poinst_on_image)> points_ready_func, OpenPoseVideo op)
{
VideoCapture cap = static_camera::getCap();
std::cout << "STARTING THREAD LAMBDA" << std::endl;
/*cv::VideoCapture cap = static_camera::getCap();*/
if (!cap.isOpened())
{
std::cout << "capture was closed, opening..." << std::endl;
cap.open(0);
}
while (true)
{
Mat img;
cap.read(img);
op.movementSkeleton(img, points_ready_func);
}
}
void AsyncArmDetection::start(std::function<void(std::vector<Point>, cv::Mat poinst_on_image)> points_ready_func, OpenPoseVideo op)
{
std::cout << "starting function" << std::endl;
std::thread async_arm_detect_thread(&AsyncArmDetection::run_arm_detection,this, points_ready_func, op);
async_arm_detect_thread.detach(); // makes sure the thread is detached from the variable.
}
}

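AsyncArmDetection::start above launches the detection loop on a detached thread and reports results through a caller-supplied callback. A reduced sketch of that pattern using only the standard library (the worker body is a placeholder, not the pose-detection loop):

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    // run a worker that repeatedly hands results to the callback, then detach it
    // so the launching scope never has to join it (as start() does)
    void startWorker(std::function<void(int)> onResult)
    {
        std::thread worker([onResult]()
        {
            for (int i = 0; i < 3; ++i)
            {
                onResult(i);   // comparable to points_ready_func(points, frame)
                std::this_thread::sleep_for(std::chrono::milliseconds(100));
            }
        });
        worker.detach();
    }

    int main()
    {
        startWorker([](int value) { std::cout << "got result " << value << std::endl; });
        std::this_thread::sleep_for(std::chrono::seconds(1)); // keep main alive briefly
        return 0;
    }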
View File

@@ -0,0 +1,23 @@
#pragma once
#include <vector>
#include <opencv2/core/types.hpp>
#include <opencv2/videoio.hpp>
#include <functional>
#include "../OpenPoseVideo.h"
#include "StaticCameraInstance.h"
namespace computervision
{
class AsyncArmDetection
{
public:
AsyncArmDetection(void);
void start(std::function<void(std::vector<cv::Point>, cv::Mat poinst_on_image)>, computervision::OpenPoseVideo op);
private:
void run_arm_detection(std::function<void(std::vector<Point>, cv::Mat poinst_on_image)> points_ready_func, OpenPoseVideo op);
};
}

View File

@@ -1,10 +1,12 @@
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/gtc/matrix_transform.hpp>
#include <functional>
#include <vector>
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
#include <ostream>
#include <map>
#include <stdlib.h>
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
@@ -17,11 +19,11 @@
#include "shaders/static_shader.h"
#include "toolbox/toolbox.h"
#include "scenes/scene.h"
#include "scenes/startupScene.h"
#include "scenes/inGameScene.h"
#include "computervision/ObjectDetection.h"
//#include "computervision/OpenPoseImage.h"
#include "computervision/OpenPoseVideo.h"
#include "computervision/async/async_arm_detection.h"
#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32s.lib")
@@ -30,95 +32,114 @@
static double UpdateDelta();
static GLFWwindow* window;
bool points_img_available = false;
cv::Mat points_img;
//Scene management variables
std::map<Scenes, Scene*> scenes;
Scene* current_scene = nullptr;
void retrieve_points(std::vector<Point> arm_points, cv::Mat points_on_image)
{
std::cout << "got points!!" << std::endl;
std::cout << "points: " << arm_points << std::endl;
points_img = points_on_image;
points_img_available = true;
}
int main(void)
{
#pragma region OPENGL_SETTINGS
if (!glfwInit())
throw "Could not inditialize glwf";
window = glfwCreateWindow(WINDOW_WIDTH, WINDOW_HEIGT, "SDBA", NULL, NULL);
if (!window)
{
glfwTerminate();
throw "Could not initialize glwf";
}
glfwMakeContextCurrent(window);
glewInit();
glGetError();
#pragma endregion
#pragma region OPENGL_SETTINGS
if (!glfwInit())
throw "Could not inditialize glwf";
window = glfwCreateWindow(WINDOW_WIDTH, WINDOW_HEIGT, "SDBA", NULL, NULL);
if (!window)
{
glfwTerminate();
throw "Could not initialize glwf";
}
glfwMakeContextCurrent(window);
glewInit();
glGetError();
#pragma endregion
glfwSetKeyCallback(window, [](GLFWwindow* window, int key, int scancode, int action, int mods)
{
current_scene->onKey(key, scancode, action, mods);
if (key == GLFW_KEY_ESCAPE)
glfwSetWindowShouldClose(window, true);
});
scenes[Scenes::STARTUP] = new StartupScene();
scenes[Scenes::INGAME] = new InGameScene();
models::RawModel raw_model = LoadObjModel("res/Tree.obj");
models::ModelTexture texture = { render_engine::loader::LoadTexture("res/TreeTexture.png") };
models::TexturedModel model = { raw_model, texture };
entities::Entity entity(model, glm::vec3(0, -5, -20), glm::vec3(0, 0, 0), 1);
shaders::StaticShader shader;
shader.Init();
render_engine::renderer::Init(shader);
entities::Camera camera(glm::vec3(0, 0, 0), glm::vec3(0, 0, 0));
// create object detection object instance
computervision::ObjectDetection objDetect;
glfwSetKeyCallback(window, [](GLFWwindow* window, int key, int scancode, int action, int mods)
{
if (key == GLFW_KEY_ESCAPE)
glfwSetWindowShouldClose(window, true);
});
// set up object detection
//objDetect.setup();
models::RawModel raw_model = LoadObjModel("res/Tree.obj");
models::ModelTexture texture = { render_engine::loader::LoadTexture("res/TreeTexture.png") };
models::TexturedModel model = { raw_model, texture };
entities::Entity entity(model, glm::vec3(0, -5, -20), glm::vec3(0, 0, 0), 1);
shaders::StaticShader shader;
shader.Init();
render_engine::renderer::Init(shader);
entities::Camera camera(glm::vec3(0, 0, 0), glm::vec3(0, 0, 0));
// create object detection object instance
computervision::ObjectDetection objDetect;
//computervision::OpenPoseImage openPoseImage;
computervision::OpenPoseVideo openPoseVideo;
openPoseVideo.setup();
// set up object detection
//objDetect.setup();
//cv::VideoCapture cam = objDetect.getCap();
cv::Mat img;
cv::VideoCapture cap = objDetect.getCap();
//cam.read(img);
//imshow("camera in main loop", img);
computervision::AsyncArmDetection as;
as.start(retrieve_points,openPoseVideo);
current_scene->start();
// Main game loop
while (!glfwWindowShouldClose(window))
{
// Update
const double delta = UpdateDelta();
entity.IncreaseRotation(glm::vec3(0, 1, 0));
camera.Move(window);
current_scene->update(window);
// Update
const double delta = UpdateDelta();
entity.IncreaseRotation(glm::vec3(0, 1, 0));
camera.Move(window);
// Render
render_engine::renderer::Prepare();
shader.Start();
shader.LoadViewMatrix(camera);
current_scene->render();
render_engine::renderer::Render(entity, shader);
render_engine::renderer::Prepare();
shader.Start();
shader.LoadViewMatrix(camera);
//objDetect.setup();
objDetect.calculateDifference();
render_engine::renderer::Render(entity, shader);
//objDetect.detectHand(cameraFrame);
if (points_img_available)
{
imshow("points", points_img);
points_img_available = false;
}
// Finish up
shader.Stop();
shader.Stop();
glfwSwapBuffers(window);
glfwPollEvents();
}
// Clean up
shader.CleanUp();
render_engine::loader::CleanUp();
current_scene->stop();
shader.CleanUp();
render_engine::loader::CleanUp();
glfwTerminate();
return 0;
return 0;
}
static double UpdateDelta()
{
double current_time = glfwGetTime();
static double last_frame_time = current_time;
double delt_time = current_time - last_frame_time;
last_frame_time = current_time;
return delt_time;
double current_time = glfwGetTime();
static double last_frame_time = current_time;
double delt_time = current_time - last_frame_time;
last_frame_time = current_time;
return delt_time;
}

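retrieve_points above is called from the detached detection thread while the render loop reads points_img and points_img_available; those accesses are unsynchronized as written. A hedged sketch (illustrative names, not repository code) of guarding the shared frame with a mutex and an atomic flag:

    #include <atomic>
    #include <mutex>
    #include <opencv2/core.hpp>

    std::atomic<bool> latest_available{false};
    std::mutex latest_mutex;
    cv::Mat latest_points_img;

    // called from the worker thread
    void onPoints(const cv::Mat& points_on_image)
    {
        std::lock_guard<std::mutex> lock(latest_mutex);
        latest_points_img = points_on_image.clone();  // deep copy; worker may reuse its buffer
        latest_available = true;
    }

    // called from the render loop; returns true if a new frame was taken
    bool takeLatestPoints(cv::Mat& out)
    {
        if (!latest_available) return false;
        std::lock_guard<std::mutex> lock(latest_mutex);
        out = latest_points_img;
        latest_available = false;
        return true;
    }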
View File

@@ -18,7 +18,7 @@ namespace render_engine
void Init(shaders::StaticShader& shader)
{
const glm::mat4 projectionMatrix =
glm::perspective(glm::radians(FOV), (WINDOW_WIDTH / WINDOW_HEIGT), NEAR_PLANE, FAR_PLANE);
glm::perspective(glm::radians(FOV), (float)(WINDOW_WIDTH / WINDOW_HEIGT), NEAR_PLANE, FAR_PLANE);
shader.Start();
shader.LoadProjectionMatrix(projectionMatrix);

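The added cast above converts the result to float, but if WINDOW_WIDTH and WINDOW_HEIGT are integer constants the division inside the parentheses still truncates before the cast. A small standalone sketch of the difference (example values assumed):

    #include <iostream>

    int main()
    {
        const int WIDTH = 1280, HEIGHT = 720;           // assumed example values

        float truncated = (float)(WIDTH / HEIGHT);      // integer division first: 1.0
        float aspect    = (float)WIDTH / (float)HEIGHT; // ~1.78, the intended aspect ratio

        std::cout << truncated << " vs " << aspect << std::endl;
        return 0;
    }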
View File

@@ -1,30 +0,0 @@
#include "inGameScene.h"
#include <GLFW/glfw3.h>
void start()
{
}
void stop()
{
}
void render()
{
}
void update(GLFWwindow* window)
{
}
void onKey(int key, int scancode, int action, int mods)
{
/**
* maybe something like: when not in settings, only escape needs to serve as a fallback key; when in settings, it behaves the same as in the start screen.
**/
}

View File

@@ -1,15 +0,0 @@
#pragma once
#include "scene.h"
class InGameScene : public Scene
{
private:
public:
virtual void start() override;
virtual void stop() override;
virtual void render() override;
virtual void update(GLFWwindow* window) override;
virtual void onKey(int key, int scancode, int action, int mods) override;
};

View File

@@ -1 +0,0 @@
#include "scene.h"

View File

@@ -1,23 +0,0 @@
#pragma once
#include <GLFW/glfw3.h>
class Scene
{
public:
virtual void start() = 0;
virtual void stop() = 0;
virtual void render() = 0;
virtual void update(GLFWwindow* window) = 0;
virtual void onKey(int key, int scancode, int action, int mods) {};
};
enum class Scenes
{
STARTUP,
INGAME,
GAMEOVER,
SETTINGS,
CALIBRATION
};

View File

@@ -1,31 +0,0 @@
#include "startupScene.h"
#include <GLFW/glfw3.h>
void start()
{
}
void stop()
{
}
void render()
{
}
void update(GLFWwindow* window)
{
}
void onKey(int key, int scancode, int action, int mods)
{
if (key == GLFW_KEY_DOWN && action == GLFW_RELEASE)
{
// idea for scrolling through the menu
menuIndex = (menuIndex + 1) % 4;
}
}

View File

@@ -1,15 +0,0 @@
#pragma once
#include "scene.h"
class StartupScene : public Scene
{
private:
int menuIndex;
public:
virtual void start() override;
virtual void stop() override;
virtual void render() override;
virtual void update(GLFWwindow* window) override;
virtual void onKey(int key, int scancode, int action, int mods) override;
};

View File

@@ -19,10 +19,10 @@
</ProjectConfiguration>
</ItemGroup>
<ItemGroup>
<ClCompile Include="src\scenes\inGameScene.cpp" />
<ClCompile Include="src\scenes\scene.cpp" />
<ClCompile Include="src\computervision\async\async_arm_detection.cpp" />
<ClCompile Include="src\computervision\FaceDetector.cpp" />
<ClCompile Include="src\computervision\ObjectDetection.cpp" />
<ClCompile Include="src\computervision\OpenPoseVideo.cpp" />
<ClCompile Include="src\computervision\SkinDetector.cpp" />
<ClCompile Include="src\computervision\FingerCount.cpp" />
<ClCompile Include="src\computervision\BackgroundRemover.cpp" />
@@ -35,14 +35,14 @@
<ClCompile Include="src\shaders\shader_program.cpp" />
<ClCompile Include="src\shaders\static_shader.cpp" />
<ClCompile Include="src\toolbox\toolbox.cpp" />
<ClCompile Include="src\scenes\startupScene.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="src\scenes\inGameScene.h" />
<ClInclude Include="src\scenes\scene.h" />
<ClInclude Include="src\computervision\async\async_arm_detection.h" />
<ClInclude Include="src\computervision\async\StaticCameraInstance.h" />
<ClInclude Include="src\computervision\FaceDetector.h" />
<ClInclude Include="src\computervision\FingerCount.h" />
<ClInclude Include="src\computervision\BackgroundRemover.h" />
<ClInclude Include="src\computervision\OpenPoseVideo.h" />
<ClInclude Include="src\computervision\SkinDetector.h" />
<ClInclude Include="src\computervision\ObjectDetection.h" />
<ClInclude Include="src\entities\camera.h" />
@@ -55,11 +55,16 @@
<ClInclude Include="src\shaders\static_shader.h" />
<ClInclude Include="src\stb_image.h" />
<ClInclude Include="src\toolbox\toolbox.h" />
<ClInclude Include="src\scenes\startupScene.h" />
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\Avans Hogeschool\Kim Veldhoen - Proftaak 2.4\pose_iter_160000.caffemodel" />
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
<PropertyGroup Label="Globals">
<VCProjectVersion>16.0</VCProjectVersion>
<ProjectGuid>{A7ECF1BE-DB22-4BF7-BFF6-E3BF72691EE6}</ProjectGuid>
@@ -126,8 +131,8 @@
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<LinkIncremental>false</LinkIncremental>
<IncludePath>$(VC_IncludePath);$(WindowsSDK_IncludePath);;C:\opencv\opencv\build\include</IncludePath>
<LibraryPath>$(VC_LibraryPath_x64);$(WindowsSDK_LibraryPath_x64);C:\opencv\opencv\build\x64\vc15\lib</LibraryPath>
<IncludePath>C:\opencv\build\include\;$(VC_IncludePath);$(WindowsSDK_IncludePath);C:\opencv\opencv\build\include</IncludePath>
<LibraryPath>C:\opencv\build\x64\vc15\lib;$(VC_LibraryPath_x64);$(WindowsSDK_LibraryPath_x64);C:\opencv\opencv\build\x64\vc15\lib</LibraryPath>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
@@ -200,7 +205,7 @@
<OptimizeReferences>true</OptimizeReferences>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalLibraryDirectories>$(SolutionDir)lib\glfw-3.3.2\$(Platform);$(SolutionDir)lib\glew-2.1.0\lib\Release\$(Platform);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<AdditionalDependencies>kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies); opencv_world452.lib</AdditionalDependencies>
<AdditionalDependencies>opencv_world452.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />

View File

@@ -57,13 +57,10 @@
<ClCompile Include="src\computervision\BackgroundRemover.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\scenes\scene.cpp">
<ClCompile Include="src\computervision\OpenPoseVideo.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\scenes\startupScene.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\scenes\inGameScene.cpp">
<ClCompile Include="src\computervision\async\async_arm_detection.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
@@ -113,17 +110,23 @@
<ClInclude Include="src\computervision\BackgroundRemover.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\scenes\scene.h">
<ClInclude Include="src\computervision\OpenPoseVideo.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\scenes\startupScene.h">
<ClInclude Include="src\computervision\async\async_arm_detection.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\scenes\inGameScene.h">
<ClInclude Include="src\computervision\async\StaticCameraInstance.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
<None Include="..\..\Avans Hogeschool\Kim Veldhoen - Proftaak 2.4\pose_iter_160000.caffemodel" />
</ItemGroup>
</Project>