11 Commits

Author SHA1 Message Date
Sem van der Hoeven
612adf6e9b [EDIT] mutex? 2021-05-31 15:50:06 +02:00
Sem van der Hoeven
25c99abb72 [EDIT] mutex with cap 2021-05-31 10:25:24 +02:00
Jasper
9d80cddbd1 [BUG] image still doesnt work 2021-05-28 16:49:13 +02:00
Sem van der Hoeven
40529f84b3 [ADD] basis for async arm detection 2021-05-28 15:31:21 +02:00
Jasper
a68c6a57bf [EDIT] edited file 2021-05-28 12:32:10 +02:00
Jasper
078a6ce66d [ADD] added all the files 2021-05-28 12:27:12 +02:00
Sem van der Hoeven
f1f1aac93d [ADD] comments 2021-05-25 15:54:02 +02:00
Sem van der Hoeven
563f465e2c [EDIT] remove unused methods 2021-05-25 15:46:53 +02:00
Sem van der Hoeven
05ae8ee019 [FEATURE] finished hand open/closed recognition 2021-05-25 14:49:04 +02:00
Sem van der Hoeven
3696e2eb30 [EDIT] improve hand detection with mask 2021-05-25 14:19:18 +02:00
Sem van der Hoeven
276aa1a449 [ADD] mask methods 2021-05-25 13:31:25 +02:00
26 changed files with 5518 additions and 234 deletions

2
.gitignore vendored
View File

@@ -428,4 +428,6 @@ FodyWeavers.xsd
**/docs/*
**/doc/*
**/pose_iter_160000.caffemodel
# End of https://www.toptal.com/developers/gitignore/api/c++,visualstudio,visualstudiocode,opencv

View File

View File

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -151,9 +151,16 @@ namespace computervision
drawVectorPoints(frame, filtered_finger_points, color_yellow, false);
putText(frame, to_string(filtered_finger_points.size()), center_bounding_rect, FONT_HERSHEY_PLAIN, 3, color_purple);
amount_of_fingers = filtered_finger_points.size();
return contours_image;
}
// Returns the finger count computed by the most recent call to
// findFingersCount(); stale (or uninitialized) until that method has run.
int FingerCount::getAmountOfFingers()
{
    return amount_of_fingers;
}
double FingerCount::findPointsDistance(Point a, Point b) {
Point difference = a - b;
return sqrt(difference.ddot(difference));

View File

@@ -24,6 +24,13 @@ namespace computervision
*/
Mat findFingersCount(Mat input_image, Mat frame);
/**
* @brief gets the currently held-up finger count.
*
* @return the currently held-up finger count
*/
int getAmountOfFingers();
private:
// colors to use
Scalar color_blue;
@@ -34,6 +41,8 @@ namespace computervision
Scalar color_yellow;
Scalar color_purple;
int amount_of_fingers;
/**
* @brief finds the distance between 2 points.
*

View File

@@ -1,47 +1,66 @@
#include <opencv2/highgui.hpp>
#include "ObjectDetection.h"
#include "BackgroundRemover.h"
#include "SkinDetector.h"
#include "FaceDetector.h"
#include "FingerCount.h"
#include "VideoCapture.h"
using namespace videocapture;
namespace computervision
{
cv::VideoCapture cap(0);
cv::Mat img, imgGray, img2, img2Gray, img3, img4;
int handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight;
bool handMaskGenerated = false;
Mat frame, frameOut, handMask, foreground, fingerCountDebug;
BackgroundRemover backgroundRemover;
SkinDetector skinDetector;
FaceDetector faceDetector;
FingerCount fingerCount;
ObjectDetection::ObjectDetection()
{
}
bool ObjectDetection::setup()
cv::Mat ObjectDetection::readCamera() {
/*videocapture::getMutex()->lock();
videocapture::getCap().read(img);
videocapture::getMutex()->unlock();*/
img = videocapture::readFrame();
return img;
}
bool ObjectDetection::detectHand(Mat cameraFrame)
{
if (!cap.isOpened()) {
cout << "Can't find camera!" << endl;
return false;
}
cap.read(frame);
frameOut = frame.clone();
Mat inputFrame = generateHandMaskSquare(cameraFrame);
frameOut = inputFrame.clone();
// detect skin color
skinDetector.drawSkinColorSampler(frameOut);
foreground = backgroundRemover.getForeground(frame);
// remove background from image
foreground = backgroundRemover.getForeground(inputFrame);
faceDetector.removeFaces(frame, foreground);
// detect the hand contours
handMask = skinDetector.getSkinMask(foreground);
// count the amount of fingers and put the info on the matrix
fingerCountDebug = fingerCount.findFingersCount(handMask, frameOut);
//backgroundRemover.calibrate(frame);
// get the amount of fingers
int fingers_amount = fingerCount.getAmountOfFingers();
// draw the hand rectangle on the camera input, and draw text showing if the hand is open or closed.
drawHandMaskRect(&cameraFrame);
string hand_text = fingers_amount > 0 ? "open" : "closed";
putText(cameraFrame,hand_text, Point(10, 75), FONT_HERSHEY_PLAIN, 2.0, Scalar(255, 0, 255),3);
imshow("camera", cameraFrame);
imshow("output", frameOut);
imshow("foreground", foreground);
@@ -50,18 +69,23 @@ namespace computervision
int key = waitKey(1);
if (key == 98) // b
backgroundRemover.calibrate(frame);
else if (key == 115) // s
skinDetector.calibrate(frame);
if (key == 98) // b, calibrate the background
backgroundRemover.calibrate(inputFrame);
else if (key == 115) // s, calibrate the skin color
skinDetector.calibrate(inputFrame);
return true;
return fingers_amount > 0;
}
void ObjectDetection::calculateDifference()
{
cap.read(img);
cap.read(img2);
//videocapture::getMutex()->lock();
//videocapture::getCap().read(img);
//videocapture::getCap().read(img2);
//videocapture::getMutex()->unlock()
img = videocapture::readFrame();
img2 = videocapture::readFrame();
cv::cvtColor(img, imgGray, cv::COLOR_RGBA2GRAY);
cv::cvtColor(img2, img2Gray, cv::COLOR_RGBA2GRAY);
@@ -72,14 +96,32 @@ namespace computervision
imshow("threshold", img4);
}
void ObjectDetection::detect()
cv::Mat ObjectDetection::generateHandMaskSquare(cv::Mat img)
{
int key = waitKey(1);
if (key == 98) // b
backgroundRemover.calibrate(frame);
else if (key == 115) // s
skinDetector.calibrate(frame);
handMaskStartXPos = 20;
handMaskStartYPos = img.rows / 5;
handMaskWidth = img.cols / 3;
handMaskHeight = img.cols / 3;
cv::Mat mask = cv::Mat::zeros(img.size(), img.type());
cv::Mat dstImg = cv::Mat::zeros(img.size(), img.type());
cv::rectangle(mask, Rect(handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight), Scalar(255, 255, 255), -1);
img.copyTo(dstImg, mask);
handMaskGenerated = true;
return dstImg;
}
bool ObjectDetection::drawHandMaskRect(cv::Mat* input)
{
if (!handMaskGenerated) return false;
rectangle(*input, Rect(handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight), Scalar(255, 255, 255));
return true;
}
void ObjectDetection::showWebcam()

View File

@@ -22,13 +22,7 @@ namespace computervision
*
*/
ObjectDetection();
/**
* @brief Initializes the object detection, captures a frame and modifies it
* so it is ready to use for object detection
*
* @return return true if webcam is connected, returns false if it isn't
*/
bool setup();
/**
* @brief Displays an image of the current webcam-footage
*
@@ -40,11 +34,38 @@ namespace computervision
*
*/
void calculateDifference();
/**
* @brief Listens for keypresses and handles them
* @brief generates the square that will hold the mask in which the hand will be detected.
*
* @param img the current camera frame
* @return a matrix containing the mask
*/
void detect();
cv::Mat generateHandMaskSquare(cv::Mat img);
/**
* @brief reads the camera and returns it in a matrix.
*
* @return the camera frame in a matrix
*/
cv::Mat readCamera();
/**
* @brief detects a hand based on the given hand mask input frame.
*
* @param cameraFrame the input frame from the camera
* @return true if hand is open, false if hand is closed
*/
bool detectHand(cv::Mat cameraFrame);
/**
* @brief draws the hand mask rectangle on the given input matrix.
*
* @param input the input matrix to draw the rectangle on
*/
bool drawHandMaskRect(cv::Mat *input);
cv::VideoCapture getCap();
};

View File

@@ -0,0 +1,111 @@
#include "OpenPoseVideo.h"
using namespace std;
using namespace cv;
using namespace cv::dnn;
namespace computervision
{
#define MPI

#ifdef MPI
    // Keypoint-index pairs forming the limbs of the MPI skeleton model.
    const int POSE_PAIRS[7][2] =
    {
        {0,1}, {1,2}, {2,3},
        {3,4}, {1,5}, {5,6},
        {6,7}
    };

    string protoFile = "res/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt";
    string weightsFile = "res/pose/mpi/pose_iter_160000.caffemodel";

    int nPoints = 8;
#endif

#ifdef COCO
    // Keypoint-index pairs forming the limbs of the COCO skeleton model.
    const int POSE_PAIRS[17][2] =
    {
        {1,2}, {1,5}, {2,3},
        {3,4}, {5,6}, {6,7},
        {1,8}, {8,9}, {9,10},
        {1,11}, {11,12}, {12,13},
        {1,0}, {0,14},
        {14,16}, {0,15}, {15,17}
    };

    string protoFile = "pose/coco/pose_deploy_linevec.prototxt";
    string weightsFile = "pose/coco/pose_iter_440000.caffemodel";

    int nPoints = 18;
#endif

    Net net;            // the caffe network, loaded once by setup()
    int inWidth = 368;  // network input width
    int inHeight = 368; // network input height
    float thresh = 0.01;// minimum confidence for a keypoint to count as detected

    /**
     * @brief loads the caffe pose network; must be called before movementSkeleton().
     */
    void OpenPoseVideo::setup() {
        net = readNetFromCaffe(protoFile, weightsFile);
    }

    /**
     * @brief converts a camera frame into a network input blob.
     *
     * @param inputImage the camera frame
     * @return the blob (pixels scaled to [0,1], resized to inWidth x inHeight)
     */
    cv::Mat OpenPoseVideo::getBlobFromImage(cv::Mat inputImage)
    {
        std::cout << "reading input image and blob" << std::endl;
        // FIX: dropped unused locals (tick-count timer, frame dimensions) and
        // an unnecessary clone — blobFromImage does not modify its input.
        return blobFromImage(inputImage, 1.0 / 255, Size(inWidth, inHeight), Scalar(0, 0, 0), false, false);
    }

    /**
     * @brief runs the pose network on the given blob and hands the detected
     *        body-part points (in inputImage coordinates) to the callback.
     *
     * @param inputImage the frame the blob was built from (used for scaling)
     * @param inpBlob    the network input blob from getBlobFromImage()
     * @param f          callback receiving one Point per body part; (-1,-1)
     *                   marks a part whose confidence was below thresh
     */
    void OpenPoseVideo::movementSkeleton(Mat inputImage, Mat inpBlob, std::function<void(std::vector<Point>)> f) {
        std::cout << "movement skeleton start" << std::endl;

        int frameWidth = inputImage.size().width;
        int frameHeight = inputImage.size().height;

        net.setInput(inpBlob);
        std::cout << "done setting input to net" << std::endl;

        Mat output = net.forward();

        int H = output.size[2];
        int W = output.size[3];

        // FIX: log message said "boxy parts"
        std::cout << "about to find position of body parts" << std::endl;

        // find the position of the body parts
        vector<Point> points(nPoints);
        for (int n = 0; n < nPoints; n++)
        {
            // probability map of body part n
            Mat probMap(H, W, CV_32F, output.ptr(0, n));

            Point2f p(-1, -1); // (-1,-1) means "not detected"
            Point maxLoc;
            double prob;
            // FIX: pass nullptr (not 0) for the unused min-value/min-location outputs
            minMaxLoc(probMap, nullptr, &prob, nullptr, &maxLoc);
            if (prob > thresh)
            {
                // scale from heatmap coordinates back to frame coordinates
                p = maxLoc;
                p.x *= (float)frameWidth / W;
                p.y *= (float)frameHeight / H;
            }
            points[n] = p;
        }

        std::cout << "about to call points receiving method" << std::endl;
        f(points);
    }
}

View File

@@ -0,0 +1,20 @@
#pragma once

#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <functional> // FIX: std::function was only available transitively
#include <vector>     // FIX: std::vector was only available transitively
#include <iostream>

// NOTE(review): a using-directive in a header leaks cv:: into every includer;
// kept for backward compatibility with existing callers, but new code should
// qualify names explicitly.
using namespace cv;

namespace computervision
{
    // Wraps the OpenPose caffe network: setup() loads the model, then frames
    // are converted to blobs and run through movementSkeleton().
    class OpenPoseVideo {
    private:

    public:
        /**
         * @brief converts a camera frame into a network input blob.
         *
         * @param inputImage the camera frame
         * @return the blob ready for movementSkeleton()
         */
        cv::Mat getBlobFromImage(cv::Mat inputImage);

        /**
         * @brief runs pose estimation and passes the detected body-part
         *        points to the given callback.
         */
        void movementSkeleton(Mat inputImage, Mat inpBlob, std::function<void(std::vector<Point>)> f);

        /**
         * @brief loads the network; call once before any detection.
         */
        void setup();
    };
}

View File

@@ -22,7 +22,7 @@ namespace computervision
void SkinDetector::drawSkinColorSampler(Mat input) {
int frameWidth = input.size().width, frameHeight = input.size().height;
int rectangleSize = 20;
int rectangleSize = 25;
Scalar rectangleColor = Scalar(255, 0, 255);
skinColorSamplerRectangle1 = Rect(frameWidth / 5, frameHeight / 2, rectangleSize, rectangleSize);

View File

@@ -0,0 +1,33 @@
#include "VideoCapture.h"
#include <mutex>
#include <iostream>
namespace videocapture {

    // Single shared capture device and the mutex that serializes access to
    // it, so multiple threads (render loop + async arm detection) never
    // interleave reads.
    static cv::VideoCapture cap(0);
    static std::mutex mtx;

    /**
     * @brief gives access to the shared capture device.
     *
     * @return a handle to the shared cv::VideoCapture
     */
    cv::VideoCapture getCap() {
        // BUG FIX: this previously called cap.release() before returning,
        // which closed the camera for every subsequent user of the shared
        // capture (and returned an already-released handle).
        return cap;
    }

    /**
     * @brief reads one frame from the camera, thread-safely.
     *
     * @return a deep copy of the frame, safe to use after the lock is gone
     */
    cv::Mat readFrame()
    {
        std::cout << "reading frame" << std::endl;
        cv::Mat camFrame, videoFrame;
        {
            // lock_guard releases the mutex even if read()/clone() throws
            std::lock_guard<std::mutex> lock(mtx);
            bool res = cap.read(camFrame);
            std::cout << (res ? "reading worked" : "reading failed") << std::endl;
            videoFrame = camFrame.clone();
        }
        return videoFrame;
    }

    /**
     * @brief exposes the mutex guarding the shared capture device.
     *
     * @return pointer to the mutex; lock it when bypassing readFrame()
     */
    std::mutex* getMutex()
    {
        return &mtx;
    }
}

View File

@@ -0,0 +1,12 @@
#pragma once

#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video.hpp>
#include <mutex>

// Shared, mutex-guarded access to the single webcam capture device used by
// all computer-vision components.
namespace videocapture {
    // Returns the shared capture device. NOTE(review): returned by value,
    // which copies the handle; callers must synchronize via getMutex().
    cv::VideoCapture getCap();

    // Exposes the mutex guarding the shared capture device.
    std::mutex* getMutex();

    // Reads one frame under the mutex and returns a clone that is safe to
    // use after the lock is released.
    cv::Mat readFrame();
}

View File

@@ -0,0 +1,41 @@
#include <iostream>
#include "async_arm_detection.h"
#include "../OpenPoseVideo.h"
#include <thread>
#include "../VideoCapture.h"
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
namespace computervision
{
    AsyncArmDetection::AsyncArmDetection()
    {
    }

    // Placeholder for the (not yet implemented) detection loop.
    void AsyncArmDetection::run_arm_detection()
    {
    }

    /**
     * @brief grabs a camera frame, builds the network input blob, and runs
     *        pose detection on a background thread.
     *
     * @param points_ready_func callback invoked with the detected points
     * @param op                the (already set-up) OpenPoseVideo instance
     */
    void AsyncArmDetection::start(std::function<void(std::vector<Point>)> points_ready_func, OpenPoseVideo op)
    {
        auto detect_task = [](cv::Mat img, std::function<void(std::vector<Point>)> f, OpenPoseVideo op, cv::Mat inpBlob) {
            std::cout << "STARTING THREAD LAMBDA" << std::endl;
            op.movementSkeleton(img, inpBlob, f);
        };

        cv::Mat img = videocapture::readFrame();
        std::cout << "starting function" << std::endl;

        // BUG FIX: the blob was previously built from a SECOND readFrame()
        // call, so the skeleton was computed on a different frame than the
        // one passed to movementSkeleton(). Build it from the same frame.
        cv::Mat inpBlob = op.getBlobFromImage(img);

        std::thread async_arm_detect_thread(detect_task, img, points_ready_func, op, inpBlob);
        // BUG FIX: the thread object was destroyed while still joinable,
        // which calls std::terminate. Detach it so the detection runs
        // fire-and-forget, matching the async design.
        async_arm_detect_thread.detach();
    }
}

View File

@@ -0,0 +1,22 @@
#pragma once

#include <vector>
#include <opencv2/core/types.hpp>
#include <opencv2/videoio.hpp>
#include <functional>

#include "../OpenPoseVideo.h"

namespace computervision
{
    // Runs OpenPose arm/skeleton detection asynchronously so the caller
    // (e.g. the render loop) is not blocked by the network forward pass.
    class AsyncArmDetection
    {
    public:
        AsyncArmDetection(void);

        // Grabs a frame, builds the input blob, and starts detection on a
        // background thread; the callback receives the detected points.
        void start(std::function<void(std::vector<cv::Point>)>, computervision::OpenPoseVideo op);

    private:
        // Placeholder for a continuous detection loop (currently unused).
        void run_arm_detection();
    };
}

View File

@@ -1,10 +1,12 @@
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/gtc/matrix_transform.hpp>
#include <functional>
#include <vector>
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
#include <ostream>
#include <map>
#include <stdlib.h>
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
@@ -17,11 +19,11 @@
#include "shaders/static_shader.h"
#include "toolbox/toolbox.h"
#include "scenes/scene.h"
#include "scenes/startupScene.h"
#include "scenes/inGameScene.h"
#include "computervision/ObjectDetection.h"
//#include "computervision/OpenPoseImage.h"
#include "computervision/OpenPoseVideo.h"
#include "computervision/async/async_arm_detection.h"
#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32s.lib")
@@ -30,95 +32,105 @@
static double UpdateDelta();
static GLFWwindow* window;
computervision::AsyncArmDetection as;
computervision::OpenPoseVideo openPoseVideo;
//Scene management variables
std::map<Scenes, Scene*> scenes;
Scene* current_scene = nullptr;
void retrieve_points(std::vector<Point> arm_points)
{
std::cout << "got points!!" << std::endl;
std::cout << "points: " << arm_points << std::endl;
as.start(retrieve_points, openPoseVideo);
}
int main(void)
{
#pragma region OPENGL_SETTINGS
if (!glfwInit())
throw "Could not inditialize glwf";
window = glfwCreateWindow(WINDOW_WIDTH, WINDOW_HEIGT, "SDBA", NULL, NULL);
if (!window)
{
glfwTerminate();
throw "Could not initialize glwf";
}
glfwMakeContextCurrent(window);
glewInit();
glGetError();
#pragma endregion
#pragma region OPENGL_SETTINGS
if (!glfwInit())
throw "Could not inditialize glwf";
window = glfwCreateWindow(WINDOW_WIDTH, WINDOW_HEIGT, "SDBA", NULL, NULL);
if (!window)
{
glfwTerminate();
throw "Could not initialize glwf";
}
glfwMakeContextCurrent(window);
glewInit();
glGetError();
#pragma endregion
glfwSetKeyCallback(window, [](GLFWwindow* window, int key, int scancode, int action, int mods)
{
current_scene->onKey(key, scancode, action, mods);
if (key == GLFW_KEY_ESCAPE)
glfwSetWindowShouldClose(window, true);
});
glfwSetKeyCallback(window, [](GLFWwindow* window, int key, int scancode, int action, int mods)
{
if (key == GLFW_KEY_ESCAPE)
glfwSetWindowShouldClose(window, true);
});
models::RawModel raw_model = LoadObjModel("res/Tree.obj");
models::ModelTexture texture = { render_engine::loader::LoadTexture("res/TreeTexture.png") };
models::TexturedModel model = { raw_model, texture };
entities::Entity entity(model, glm::vec3(0, -5, -20), glm::vec3(0, 0, 0), 1);
shaders::StaticShader shader;
shader.Init();
render_engine::renderer::Init(shader);
entities::Camera camera(glm::vec3(0, 0, 0), glm::vec3(0, 0, 0));
// create object detection object instance
computervision::ObjectDetection objDetect;
//computervision::OpenPoseImage openPoseImage;
scenes[Scenes::STARTUP] = new StartupScene();
scenes[Scenes::INGAME] = new InGameScene();
openPoseVideo.setup();
// set up object detection
//objDetect.setup();
cv::Mat cameraFrame;
//openPoseVideo.setup();
models::RawModel raw_model = LoadObjModel("res/Tree.obj");
models::ModelTexture texture = { render_engine::loader::LoadTexture("res/TreeTexture.png") };
models::TexturedModel model = { raw_model, texture };
entities::Entity entity(model, glm::vec3(0, -5, -20), glm::vec3(0, 0, 0), 1);
as.start(retrieve_points, openPoseVideo);
shaders::StaticShader shader;
shader.Init();
render_engine::renderer::Init(shader);
entities::Camera camera(glm::vec3(0, 0, 0), glm::vec3(0, 0, 0));
// create object detection object instance
computervision::ObjectDetection objDetect;
// set up object detection
//objDetect.setup();
current_scene->start();
// Main game loop
while (!glfwWindowShouldClose(window))
{
// Update
const double delta = UpdateDelta();
entity.IncreaseRotation(glm::vec3(0, 1, 0));
camera.Move(window);
current_scene->update(window);
// Update
const double delta = UpdateDelta();
entity.IncreaseRotation(glm::vec3(0, 1, 0));
camera.Move(window);
// Render
render_engine::renderer::Prepare();
shader.Start();
shader.LoadViewMatrix(camera);
current_scene->render();
render_engine::renderer::Render(entity, shader);
render_engine::renderer::Prepare();
shader.Start();
shader.LoadViewMatrix(camera);
//objDetect.setup();
objDetect.calculateDifference();
render_engine::renderer::Render(entity, shader);
cameraFrame = objDetect.readCamera();
//objDetect.detectHand(cameraFrame);
// Finish up
shader.Stop();
shader.Stop();
glfwSwapBuffers(window);
glfwPollEvents();
}
// Clean up
shader.CleanUp();
render_engine::loader::CleanUp();
current_scene->stop();
shader.CleanUp();
render_engine::loader::CleanUp();
glfwTerminate();
return 0;
return 0;
}
static double UpdateDelta()
{
double current_time = glfwGetTime();
static double last_frame_time = current_time;
double delt_time = current_time - last_frame_time;
last_frame_time = current_time;
return delt_time;
double current_time = glfwGetTime();
static double last_frame_time = current_time;
double delt_time = current_time - last_frame_time;
last_frame_time = current_time;
return delt_time;
}

View File

@@ -18,7 +18,7 @@ namespace render_engine
void Init(shaders::StaticShader& shader)
{
const glm::mat4 projectionMatrix =
glm::perspective(glm::radians(FOV), (WINDOW_WIDTH / WINDOW_HEIGT), NEAR_PLANE, FAR_PLANE);
glm::perspective(glm::radians(FOV), (float)(WINDOW_WIDTH / WINDOW_HEIGT), NEAR_PLANE, FAR_PLANE);
shader.Start();
shader.LoadProjectionMatrix(projectionMatrix);

View File

@@ -1,30 +0,0 @@
#include "inGameScene.h"
#include <GLFW/glfw3.h>
void start()
{
}
void stop()
{
}
void render()
{
}
void update(GLFWwindow* window)
{
}
void onKey(int key, int scancode, int action, int mods)
{
/**
* misschien iets van als niet in settings dan hoeft alleen escape een knop zijn als reserve optie. Als wel in settings, dan heb je hetzelfde hoe je in het in het begin scherm hebt.
**/
}

View File

@@ -1,15 +0,0 @@
#pragma once
#include "scene.h"
class InGameScene : public Scene
{
private:
public:
virtual void start() override;
virtual void stop() override;
virtual void render() override;
virtual void update(GLFWwindow* window) override;
virtual void onKey(int key, int scancode, int action, int mods) override;
};

View File

@@ -1 +0,0 @@
#include "scene.h"

View File

@@ -1,23 +0,0 @@
#pragma once
#include <GLFW/glfw3.h>
class Scene
{
public:
virtual void start() = 0;
virtual void stop() = 0;
virtual void render() = 0;
virtual void update(GLFWwindow* window) = 0;
virtual void onKey(int key, int scancode, int action, int mods) {};
};
enum class Scenes
{
STARTUP,
INGAME,
GAMEOVER,
SETTINGS,
CALIBRATION
};

View File

@@ -1,31 +0,0 @@
#include "startupScene.h"
#include <GLFW/glfw3.h>
void start()
{
}
void stop()
{
}
void render()
{
}
void update(GLFWwindow* window)
{
}
void onKey(int key, int scancode, int action, int mods)
{
if (key == GLFW_KEY_DOWN && action == GLFW_RELEASE)
{
//ideetje voor het scrollen door het menu heen
menuIndex = (menuIndex + 1) % 4;
}
}

View File

@@ -1,15 +0,0 @@
#pragma once
#include "scene.h"
class StartupScene : public Scene
{
private:
int menuIndex;
public:
virtual void start() override;
virtual void stop() override;
virtual void render() override;
virtual void update(GLFWwindow* window) override;
virtual void onKey(int key, int scancode, int action, int mods) override;
};

View File

@@ -19,13 +19,14 @@
</ProjectConfiguration>
</ItemGroup>
<ItemGroup>
<ClCompile Include="src\scenes\inGameScene.cpp" />
<ClCompile Include="src\scenes\scene.cpp" />
<ClCompile Include="src\computervision\async\async_arm_detection.cpp" />
<ClCompile Include="src\computervision\FaceDetector.cpp" />
<ClCompile Include="src\computervision\ObjectDetection.cpp" />
<ClCompile Include="src\computervision\OpenPoseVideo.cpp" />
<ClCompile Include="src\computervision\SkinDetector.cpp" />
<ClCompile Include="src\computervision\FingerCount.cpp" />
<ClCompile Include="src\computervision\BackgroundRemover.cpp" />
<ClCompile Include="src\computervision\VideoCapture.cpp" />
<ClCompile Include="src\entities\camera.cpp" />
<ClCompile Include="src\entities\entity.cpp" />
<ClCompile Include="src\main.cpp" />
@@ -35,16 +36,16 @@
<ClCompile Include="src\shaders\shader_program.cpp" />
<ClCompile Include="src\shaders\static_shader.cpp" />
<ClCompile Include="src\toolbox\toolbox.cpp" />
<ClCompile Include="src\scenes\startupScene.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="src\scenes\inGameScene.h" />
<ClInclude Include="src\scenes\scene.h" />
<ClInclude Include="src\computervision\async\async_arm_detection.h" />
<ClInclude Include="src\computervision\FaceDetector.h" />
<ClInclude Include="src\computervision\FingerCount.h" />
<ClInclude Include="src\computervision\BackgroundRemover.h" />
<ClInclude Include="src\computervision\OpenPoseVideo.h" />
<ClInclude Include="src\computervision\SkinDetector.h" />
<ClInclude Include="src\computervision\ObjectDetection.h" />
<ClInclude Include="src\computervision\VideoCapture.h" />
<ClInclude Include="src\entities\camera.h" />
<ClInclude Include="src\entities\entity.h" />
<ClInclude Include="src\models\model.h" />
@@ -55,11 +56,15 @@
<ClInclude Include="src\shaders\static_shader.h" />
<ClInclude Include="src\stb_image.h" />
<ClInclude Include="src\toolbox\toolbox.h" />
<ClInclude Include="src\scenes\startupScene.h" />
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
<PropertyGroup Label="Globals">
<VCProjectVersion>16.0</VCProjectVersion>
<ProjectGuid>{A7ECF1BE-DB22-4BF7-BFF6-E3BF72691EE6}</ProjectGuid>

View File

@@ -57,13 +57,13 @@
<ClCompile Include="src\computervision\BackgroundRemover.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\scenes\scene.cpp">
<ClCompile Include="src\computervision\OpenPoseVideo.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\scenes\startupScene.cpp">
<ClCompile Include="src\computervision\async\async_arm_detection.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\scenes\inGameScene.cpp">
<ClCompile Include="src\computervision\VideoCapture.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
@@ -113,17 +113,22 @@
<ClInclude Include="src\computervision\BackgroundRemover.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\scenes\scene.h">
<ClInclude Include="src\computervision\OpenPoseVideo.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\scenes\startupScene.h">
<ClInclude Include="src\computervision\async\async_arm_detection.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\scenes\inGameScene.h">
<ClInclude Include="src\computervision\VideoCapture.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
</Project>