1 Commit

Author: Lars
SHA1: 1f2258bc01
Message: [ADD] skeleton code for different scenes
Date: 2021-05-25 15:52:41 +02:00

26 changed files with 234 additions and 5518 deletions

.gitignore vendored (2 lines changed)

@@ -428,6 +428,4 @@ FodyWeavers.xsd
**/docs/*
**/doc/*
**/pose_iter_160000.caffemodel
# End of https://www.toptal.com/developers/gitignore/api/c++,visualstudio,visualstudiocode,opencv

gameoverScene.cpp Normal file (0 lines)

gameoverScene.h Normal file (0 lines)

File diff suppressed because it is too large.

File diff suppressed because it is too large.

src/computervision/FingerCount.cpp

@@ -151,16 +151,9 @@ namespace computervision
drawVectorPoints(frame, filtered_finger_points, color_yellow, false);
putText(frame, to_string(filtered_finger_points.size()), center_bounding_rect, FONT_HERSHEY_PLAIN, 3, color_purple);
amount_of_fingers = filtered_finger_points.size();
return contours_image;
}
int FingerCount::getAmountOfFingers()
{
return amount_of_fingers;
}
double FingerCount::findPointsDistance(Point a, Point b) {
Point difference = a - b;
return sqrt(difference.ddot(difference));

src/computervision/FingerCount.h

@@ -24,13 +24,6 @@ namespace computervision
*/
Mat findFingersCount(Mat input_image, Mat frame);
/**
* @brief gets the currently held-up finger count.
*
* @return the currently held-up finger count
*/
int getAmountOfFingers();
private:
// colors to use
Scalar color_blue;
@@ -41,8 +34,6 @@ namespace computervision
Scalar color_yellow;
Scalar color_purple;
int amount_of_fingers;
/**
* @brief finds the distance between 2 points.
*

src/computervision/ObjectDetection.cpp

@@ -1,66 +1,47 @@
#include <opencv2/highgui.hpp>
#include "ObjectDetection.h"
#include "BackgroundRemover.h"
#include "SkinDetector.h"
#include "FaceDetector.h"
#include "FingerCount.h"
#include "VideoCapture.h"
using namespace videocapture;
namespace computervision
{
cv::VideoCapture cap(0);
cv::Mat img, imgGray, img2, img2Gray, img3, img4;
int handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight;
bool handMaskGenerated = false;
Mat frame, frameOut, handMask, foreground, fingerCountDebug;
BackgroundRemover backgroundRemover;
SkinDetector skinDetector;
FaceDetector faceDetector;
FingerCount fingerCount;
ObjectDetection::ObjectDetection()
{
}
cv::Mat ObjectDetection::readCamera() {
/*videocapture::getMutex()->lock();
videocapture::getCap().read(img);
videocapture::getMutex()->unlock();*/
img = videocapture::readFrame();
return img;
bool ObjectDetection::setup()
{
if (!cap.isOpened()) {
cout << "Can't find camera!" << endl;
return false;
}
bool ObjectDetection::detectHand(Mat cameraFrame)
{
Mat inputFrame = generateHandMaskSquare(cameraFrame);
frameOut = inputFrame.clone();
cap.read(frame);
frameOut = frame.clone();
// detect skin color
skinDetector.drawSkinColorSampler(frameOut);
// remove background from image
foreground = backgroundRemover.getForeground(inputFrame);
foreground = backgroundRemover.getForeground(frame);
// detect the hand contours
faceDetector.removeFaces(frame, foreground);
handMask = skinDetector.getSkinMask(foreground);
// count the amount of fingers and put the info on the matrix
fingerCountDebug = fingerCount.findFingersCount(handMask, frameOut);
// get the amount of fingers
int fingers_amount = fingerCount.getAmountOfFingers();
//backgroundRemover.calibrate(frame);
// draw the hand rectangle on the camera input, and draw text showing if the hand is open or closed.
drawHandMaskRect(&cameraFrame);
string hand_text = fingers_amount > 0 ? "open" : "closed";
putText(cameraFrame,hand_text, Point(10, 75), FONT_HERSHEY_PLAIN, 2.0, Scalar(255, 0, 255),3);
imshow("camera", cameraFrame);
imshow("output", frameOut);
imshow("foreground", foreground);
@@ -69,23 +50,18 @@ namespace computervision
int key = waitKey(1);
if (key == 98) // b, calibrate the background
backgroundRemover.calibrate(inputFrame);
else if (key == 115) // s, calibrate the skin color
skinDetector.calibrate(inputFrame);
if (key == 98) // b
backgroundRemover.calibrate(frame);
else if (key == 115) // s
skinDetector.calibrate(frame);
return fingers_amount > 0;
return true;
}
void ObjectDetection::calculateDifference()
{
//videocapture::getMutex()->lock();
//videocapture::getCap().read(img);
//videocapture::getCap().read(img2);
//videocapture::getMutex()->unlock()
img = videocapture::readFrame();
img2 = videocapture::readFrame();
cap.read(img);
cap.read(img2);
cv::cvtColor(img, imgGray, cv::COLOR_RGBA2GRAY);
cv::cvtColor(img2, img2Gray, cv::COLOR_RGBA2GRAY);
@@ -96,32 +72,14 @@ namespace computervision
imshow("threshold", img4);
}
cv::Mat ObjectDetection::generateHandMaskSquare(cv::Mat img)
void ObjectDetection::detect()
{
handMaskStartXPos = 20;
handMaskStartYPos = img.rows / 5;
handMaskWidth = img.cols / 3;
handMaskHeight = img.cols / 3;
int key = waitKey(1);
cv::Mat mask = cv::Mat::zeros(img.size(), img.type());
cv::Mat dstImg = cv::Mat::zeros(img.size(), img.type());
cv::rectangle(mask, Rect(handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight), Scalar(255, 255, 255), -1);
img.copyTo(dstImg, mask);
handMaskGenerated = true;
return dstImg;
}
bool ObjectDetection::drawHandMaskRect(cv::Mat* input)
{
if (!handMaskGenerated) return false;
rectangle(*input, Rect(handMaskStartXPos, handMaskStartYPos, handMaskWidth, handMaskHeight), Scalar(255, 255, 255));
return true;
if (key == 98) // b
backgroundRemover.calibrate(frame);
else if (key == 115) // s
skinDetector.calibrate(frame);
}
void ObjectDetection::showWebcam()

src/computervision/ObjectDetection.h

@@ -22,7 +22,13 @@ namespace computervision
*
*/
ObjectDetection();
/**
* @brief Initializes the object detection, captures a frame and modifies it
* so it is ready to use for object detection
*
* @return true if the webcam is connected, false if it isn't
*/
bool setup();
/**
* @brief Displays an image of the current webcam-footage
*
@@ -34,38 +40,11 @@ namespace computervision
*
*/
void calculateDifference();
/**
* @brief generates the square that will hold the mask in which the hand will be detected.
* @brief Listens for keypresses and handles them
*
* @param img the current camera frame
* @return a matrix containing the mask
*/
cv::Mat generateHandMaskSquare(cv::Mat img);
/**
* @brief reads the camera and returns it in a matrix.
*
* @return the camera frame in a matrix
*/
cv::Mat readCamera();
/**
* @brief detects a hand based on the given hand mask input frame.
*
* @param inputFrame the input frame from the camera
* @return true if hand is open, false if hand is closed
*/
bool detectHand(cv::Mat cameraFrame);
/**
* @brief draws the hand mask rectangle on the given input matrix.
*
* @param input the input matrix to draw the rectangle on
*/
bool drawHandMaskRect(cv::Mat *input);
cv::VideoCapture getCap();
void detect();
};
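
Note: the snippet below is a minimal usage sketch, not part of this commit. It shows how the reworked ObjectDetection interface documented above (setup, showWebcam, calculateDifference, detect) could be driven from a loop, mirroring the calls visible in the main.cpp hunk further down; run_detection_loop is a hypothetical driver.

#include "computervision/ObjectDetection.h"

int run_detection_loop() // hypothetical driver; loop condition omitted for brevity
{
    computervision::ObjectDetection objDetect;

    if (!objDetect.setup())              // returns false when no webcam is connected
        return -1;

    while (true)                         // in main.cpp this is the glfwWindowShouldClose loop
    {
        objDetect.showWebcam();          // display the current webcam footage
        objDetect.calculateDifference(); // frame differencing, as called from main.cpp
        objDetect.detect();              // listens for key presses (b/s calibration)
    }
}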

src/computervision/OpenPoseVideo.cpp (deleted)

@@ -1,111 +0,0 @@
#include "OpenPoseVideo.h"
using namespace std;
using namespace cv;
using namespace cv::dnn;
namespace computervision
{
#define MPI
#ifdef MPI
const int POSE_PAIRS[7][2] =
{
{0,1}, {1,2}, {2,3},
{3,4}, {1,5}, {5,6},
{6,7}
};
string protoFile = "res/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt";
string weightsFile = "res/pose/mpi/pose_iter_160000.caffemodel";
int nPoints = 8;
#endif
#ifdef COCO
const int POSE_PAIRS[17][2] =
{
{1,2}, {1,5}, {2,3},
{3,4}, {5,6}, {6,7},
{1,8}, {8,9}, {9,10},
{1,11}, {11,12}, {12,13},
{1,0}, {0,14},
{14,16}, {0,15}, {15,17}
};
string protoFile = "pose/coco/pose_deploy_linevec.prototxt";
string weightsFile = "pose/coco/pose_iter_440000.caffemodel";
int nPoints = 18;
#endif
Net net;
int inWidth = 368;
int inHeight = 368;
float thresh = 0.01;
void OpenPoseVideo::setup() {
net = readNetFromCaffe(protoFile, weightsFile);
}
cv::Mat OpenPoseVideo::getBlobFromImage(cv::Mat inputImage)
{
Mat frame;
int frameWidth = inputImage.size().width;
int frameHeight = inputImage.size().height;
double t = (double)cv::getTickCount();
std::cout << "reading input image and blob" << std::endl;
frame = inputImage.clone();
Mat inpBlob = blobFromImage(frame, 1.0 / 255, Size(inWidth, inHeight), Scalar(0, 0, 0), false, false);
return inpBlob;
}
void OpenPoseVideo::movementSkeleton(Mat inputImage, Mat inpBlob, std::function<void(std::vector<Point>)> f) {
std::cout << "movement skeleton start" << std::endl;
int frameWidth = inputImage.size().width;
int frameHeight = inputImage.size().height;
std::cout << "done reading image and blob" << std::endl;
net.setInput(inpBlob);
std::cout << "done setting input to net" << std::endl;
Mat output = net.forward();
int H = output.size[2];
int W = output.size[3];
std::cout << "about to find position of boxy parts" << std::endl;
// find the position of the body parts
vector<Point> points(nPoints);
for (int n = 0; n < nPoints; n++)
{
// Probability map of corresponding body's part.
Mat probMap(H, W, CV_32F, output.ptr(0, n));
Point2f p(-1, -1);
Point maxLoc;
double prob;
minMaxLoc(probMap, 0, &prob, 0, &maxLoc);
if (prob > thresh)
{
p = maxLoc;
p.x *= (float)frameWidth / W;
p.y *= (float)frameHeight / H;
/*circle(frame, cv::Point((int)p.x, (int)p.y), 8, Scalar(0, 255, 255), -1);
cv::putText(frame, cv::format("%d", n), cv::Point((int)p.x, (int)p.y), cv::FONT_HERSHEY_COMPLEX, 1.1, cv::Scalar(0, 0, 255), 2);*/
}
points[n] = p;
}
//cv::putText(frame, cv::format("time taken = %.2f sec", t), cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, .8, cv::Scalar(255, 50, 0), 2);
// imshow("Output-Keypoints", frameCopy);
/*imshow("Output-Skeleton", frame);*/
std::cout << "about to call points receiving method" << std::endl;
f(points);
}
}

src/computervision/OpenPoseVideo.h (deleted)

@@ -1,20 +0,0 @@
#pragma once
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace cv;
namespace computervision
{
class OpenPoseVideo{
private:
public:
cv::Mat getBlobFromImage(cv::Mat inputImage);
void movementSkeleton(Mat inputImage, Mat inpBlob, std::function<void(std::vector<Point>)> f);
void setup();
};
}

src/computervision/SkinDetector.cpp

@@ -22,7 +22,7 @@ namespace computervision
void SkinDetector::drawSkinColorSampler(Mat input) {
int frameWidth = input.size().width, frameHeight = input.size().height;
int rectangleSize = 25;
int rectangleSize = 20;
Scalar rectangleColor = Scalar(255, 0, 255);
skinColorSamplerRectangle1 = Rect(frameWidth / 5, frameHeight / 2, rectangleSize, rectangleSize);

src/computervision/VideoCapture.cpp (deleted)

@@ -1,33 +0,0 @@
#include "VideoCapture.h"
#include <mutex>
#include <iostream>
namespace videocapture{
static cv::VideoCapture cap(0);
static std::mutex mtx;
cv::VideoCapture getCap() {
cap.release();
return cap;
}
cv::Mat readFrame()
{
std::cout << "reading frame" << std::endl;
cv::Mat camFrame, videoFrame;
mtx.lock();
bool res = cap.read(camFrame);
std::cout << (res ? "reading worked" : "reading failed") << std::endl;
videoFrame = camFrame.clone();
mtx.unlock();
return videoFrame;
}
std::mutex* getMutex()
{
return &mtx;
}
}

src/computervision/VideoCapture.h (deleted)

@@ -1,12 +0,0 @@
#pragma once
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video.hpp>
#include <mutex>
namespace videocapture {
cv::VideoCapture getCap();
std::mutex* getMutex();
cv::Mat readFrame();
}

src/computervision/async/async_arm_detection.cpp (deleted)

@@ -1,41 +0,0 @@
#include <iostream>
#include "async_arm_detection.h"
#include "../OpenPoseVideo.h"
#include <thread>
#include "../VideoCapture.h"
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
namespace computervision
{
AsyncArmDetection::AsyncArmDetection()
{
}
void AsyncArmDetection::run_arm_detection()
{
}
void AsyncArmDetection::start(std::function<void(std::vector<Point>)> points_ready_func, OpenPoseVideo op)
{
auto lambda = [](cv::Mat img, std::function<void(std::vector<Point>)> f, OpenPoseVideo op, cv::Mat inpBlob) {
std::cout << "STARTING THREAD LAMBDA" << std::endl;
//imshow("image", img); 255, Size(368, 368), Scalar(0, 0, 0), false, false);
op.movementSkeleton(img, inpBlob, f);
//}
};
cv::Mat img = videocapture::readFrame();
std::cout << "starting function" << std::endl;
cv::Mat inpBlob = op.getBlobFromImage(videocapture::readFrame());
std::thread async_arm_detect_thread(lambda, img, points_ready_func, op, inpBlob);
}
}

src/computervision/async/async_arm_detection.h (deleted)

@@ -1,22 +0,0 @@
#pragma once
#include <vector>
#include <opencv2/core/types.hpp>
#include <opencv2/videoio.hpp>
#include <functional>
#include "../OpenPoseVideo.h"
namespace computervision
{
class AsyncArmDetection
{
public:
AsyncArmDetection(void);
void start(std::function<void(std::vector<cv::Point>)>, computervision::OpenPoseVideo op);
private:
void run_arm_detection();
};
}

src/main.cpp

@@ -1,12 +1,10 @@
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/gtc/matrix_transform.hpp>
#include <functional>
#include <vector>
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
#include <ostream>
#include <stdlib.h>
#include <map>
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
@@ -19,11 +17,11 @@
#include "shaders/static_shader.h"
#include "toolbox/toolbox.h"
#include "computervision/ObjectDetection.h"
//#include "computervision/OpenPoseImage.h"
#include "computervision/OpenPoseVideo.h"
#include "scenes/scene.h"
#include "scenes/startupScene.h"
#include "scenes/inGameScene.h"
#include "computervision/async/async_arm_detection.h"
#include "computervision/ObjectDetection.h"
#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32s.lib")
@@ -32,15 +30,11 @@
static double UpdateDelta();
static GLFWwindow* window;
computervision::AsyncArmDetection as;
computervision::OpenPoseVideo openPoseVideo;
void retrieve_points(std::vector<Point> arm_points)
{
std::cout << "got points!!" << std::endl;
std::cout << "points: " << arm_points << std::endl;
as.start(retrieve_points, openPoseVideo);
}
//Scene management variables
std::map<Scenes, Scene*> scenes;
Scene* current_scene = nullptr;
int main(void)
{
@@ -60,10 +54,13 @@ int main(void)
glfwSetKeyCallback(window, [](GLFWwindow* window, int key, int scancode, int action, int mods)
{
current_scene->onKey(key, scancode, action, mods);
if (key == GLFW_KEY_ESCAPE)
glfwSetWindowShouldClose(window, true);
});
scenes[Scenes::STARTUP] = new StartupScene();
scenes[Scenes::INGAME] = new InGameScene();
models::RawModel raw_model = LoadObjModel("res/Tree.obj");
models::ModelTexture texture = { render_engine::loader::LoadTexture("res/TreeTexture.png") };
@@ -78,22 +75,12 @@ int main(void)
// create object detection object instance
computervision::ObjectDetection objDetect;
//computervision::OpenPoseImage openPoseImage;
openPoseVideo.setup();
// set up object detection
//objDetect.setup();
cv::Mat cameraFrame;
//openPoseVideo.setup();
as.start(retrieve_points, openPoseVideo);
current_scene->start();
// Main game loop
while (!glfwWindowShouldClose(window))
{
@@ -101,17 +88,17 @@ int main(void)
const double delta = UpdateDelta();
entity.IncreaseRotation(glm::vec3(0, 1, 0));
camera.Move(window);
current_scene->update(window);
// Render
render_engine::renderer::Prepare();
shader.Start();
shader.LoadViewMatrix(camera);
current_scene->render();
render_engine::renderer::Render(entity, shader);
cameraFrame = objDetect.readCamera();
//objDetect.detectHand(cameraFrame);
//objDetect.setup();
objDetect.calculateDifference();
// Finish up
shader.Stop();
@@ -122,6 +109,7 @@ int main(void)
// Clean up
shader.CleanUp();
render_engine::loader::CleanUp();
current_scene->stop();
glfwTerminate();
return 0;
}

(render engine source file, name not shown)

@@ -18,7 +18,7 @@ namespace render_engine
void Init(shaders::StaticShader& shader)
{
const glm::mat4 projectionMatrix =
glm::perspective(glm::radians(FOV), (float)(WINDOW_WIDTH / WINDOW_HEIGT), NEAR_PLANE, FAR_PLANE);
glm::perspective(glm::radians(FOV), (WINDOW_WIDTH / WINDOW_HEIGT), NEAR_PLANE, FAR_PLANE);
shader.Start();
shader.LoadProjectionMatrix(projectionMatrix);
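
Note: both variants of the projection line above divide WINDOW_WIDTH by WINDOW_HEIGT before any cast takes effect, so if those constants are integers (an assumption, their definitions are not shown here) the aspect ratio is truncated either way. The illustrative snippet below uses 1280 and 720 as stand-in values.

// Illustrative only; 1280/720 stand in for WINDOW_WIDTH/WINDOW_HEIGT, assumed to be ints.
float a = (float)(1280 / 720); // 1.0f  -- integer division truncates before the cast
float b = 1280 / 720;          // 1.0f  -- same truncation, then implicit conversion
float c = (float)1280 / 720;   // 1.777...f -- casting one operand gives the real aspect ratio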

src/scenes/inGameScene.cpp Normal file (30 lines)

@@ -0,0 +1,30 @@
#include "inGameScene.h"
#include <GLFW/glfw3.h>
void InGameScene::start()
{
}
void InGameScene::stop()
{
}
void InGameScene::render()
{
}
void InGameScene::update(GLFWwindow* window)
{
}
void InGameScene::onKey(int key, int scancode, int action, int mods)
{
/**
* Maybe something like: when not in the settings, only Escape needs to work as a fallback button.
* When in the settings, it behaves the same as in the start screen.
**/
}

src/scenes/inGameScene.h Normal file (15 lines)

@@ -0,0 +1,15 @@
#pragma once
#include "scene.h"
class InGameScene : public Scene
{
private:
public:
virtual void start() override;
virtual void stop() override;
virtual void render() override;
virtual void update(GLFWwindow* window) override;
virtual void onKey(int key, int scancode, int action, int mods) override;
};

src/scenes/scene.cpp Normal file (1 line)

@@ -0,0 +1 @@
#include "scene.h"

src/scenes/scene.h Normal file (23 lines)

@@ -0,0 +1,23 @@
#pragma once
#include <GLFW/glfw3.h>
class Scene
{
public:
virtual void start() = 0;
virtual void stop() = 0;
virtual void render() = 0;
virtual void update(GLFWwindow* window) = 0;
virtual void onKey(int key, int scancode, int action, int mods) {};
};
enum class Scenes
{
STARTUP,
INGAME,
GAMEOVER,
SETTINGS,
CALIBRATION
};
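
Note: a hypothetical sketch of how the new Scene interface and Scenes enum are meant to be wired up, based on the scenes map and current_scene pointer added in the main.cpp hunk above; the switchScene and setupScenes helpers are not part of the commit.

#include <map>
#include "scenes/scene.h"
#include "scenes/startupScene.h"
#include "scenes/inGameScene.h"

static std::map<Scenes, Scene*> scenes;   // mirrors the scene map in main.cpp
static Scene* current_scene = nullptr;

// Hypothetical helper: stop the active scene, switch, and start the new one.
static void switchScene(Scenes next)
{
    if (current_scene != nullptr)
        current_scene->stop();
    current_scene = scenes[next];
    current_scene->start();
}

static void setupScenes()
{
    scenes[Scenes::STARTUP] = new StartupScene();
    scenes[Scenes::INGAME]  = new InGameScene();
    switchScene(Scenes::STARTUP);
}

// Inside the game loop (see main.cpp) the active scene is then driven with:
//   current_scene->update(window);
//   current_scene->render();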

src/scenes/startupScene.cpp Normal file (31 lines)

@@ -0,0 +1,31 @@
#include "startupScene.h"
#include <GLFW/glfw3.h>
void StartupScene::start()
{
}
void StartupScene::stop()
{
}
void StartupScene::render()
{
}
void StartupScene::update(GLFWwindow* window)
{
}
void StartupScene::onKey(int key, int scancode, int action, int mods)
{
if (key == GLFW_KEY_DOWN && action == GLFW_RELEASE)
{
// idea for scrolling through the menu
menuIndex = (menuIndex + 1) % 4;
}
}
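
Note: an illustrative extension of the menu-scrolling idea above, assuming a 4-entry menu; the GLFW_KEY_UP branch is not in the commit and is shown only to highlight wrapping upward without a negative modulo.

#include <GLFW/glfw3.h>
#include "startupScene.h"

void StartupScene::onKey(int key, int scancode, int action, int mods)
{
    if (action != GLFW_RELEASE)
        return;

    if (key == GLFW_KEY_DOWN)
        menuIndex = (menuIndex + 1) % 4;     // 0 -> 1 -> 2 -> 3 -> 0
    else if (key == GLFW_KEY_UP)
        menuIndex = (menuIndex + 4 - 1) % 4; // 0 wraps back to 3 without going negative
}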

src/scenes/startupScene.h Normal file (15 lines)

@@ -0,0 +1,15 @@
#pragma once
#include "scene.h"
class StartupScene : public Scene
{
private:
int menuIndex;
public:
virtual void start() override;
virtual void stop() override;
virtual void render() override;
virtual void update(GLFWwindow* window) override;
virtual void onKey(int key, int scancode, int action, int mods) override;
};

(Visual Studio project file, .vcxproj)

@@ -19,14 +19,13 @@
</ProjectConfiguration>
</ItemGroup>
<ItemGroup>
<ClCompile Include="src\computervision\async\async_arm_detection.cpp" />
<ClCompile Include="src\scenes\inGameScene.cpp" />
<ClCompile Include="src\scenes\scene.cpp" />
<ClCompile Include="src\computervision\FaceDetector.cpp" />
<ClCompile Include="src\computervision\ObjectDetection.cpp" />
<ClCompile Include="src\computervision\OpenPoseVideo.cpp" />
<ClCompile Include="src\computervision\SkinDetector.cpp" />
<ClCompile Include="src\computervision\FingerCount.cpp" />
<ClCompile Include="src\computervision\BackgroundRemover.cpp" />
<ClCompile Include="src\computervision\VideoCapture.cpp" />
<ClCompile Include="src\entities\camera.cpp" />
<ClCompile Include="src\entities\entity.cpp" />
<ClCompile Include="src\main.cpp" />
@@ -36,16 +35,16 @@
<ClCompile Include="src\shaders\shader_program.cpp" />
<ClCompile Include="src\shaders\static_shader.cpp" />
<ClCompile Include="src\toolbox\toolbox.cpp" />
<ClCompile Include="src\scenes\startupScene.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="src\computervision\async\async_arm_detection.h" />
<ClInclude Include="src\scenes\inGameScene.h" />
<ClInclude Include="src\scenes\scene.h" />
<ClInclude Include="src\computervision\FaceDetector.h" />
<ClInclude Include="src\computervision\FingerCount.h" />
<ClInclude Include="src\computervision\BackgroundRemover.h" />
<ClInclude Include="src\computervision\OpenPoseVideo.h" />
<ClInclude Include="src\computervision\SkinDetector.h" />
<ClInclude Include="src\computervision\ObjectDetection.h" />
<ClInclude Include="src\computervision\VideoCapture.h" />
<ClInclude Include="src\entities\camera.h" />
<ClInclude Include="src\entities\entity.h" />
<ClInclude Include="src\models\model.h" />
@@ -56,15 +55,11 @@
<ClInclude Include="src\shaders\static_shader.h" />
<ClInclude Include="src\stb_image.h" />
<ClInclude Include="src\toolbox\toolbox.h" />
<ClInclude Include="src\scenes\startupScene.h" />
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
<PropertyGroup Label="Globals">
<VCProjectVersion>16.0</VCProjectVersion>
<ProjectGuid>{A7ECF1BE-DB22-4BF7-BFF6-E3BF72691EE6}</ProjectGuid>

(Visual Studio project filters file, .vcxproj.filters)

@@ -57,13 +57,13 @@
<ClCompile Include="src\computervision\BackgroundRemover.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\computervision\OpenPoseVideo.cpp">
<ClCompile Include="src\scenes\scene.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\computervision\async\async_arm_detection.cpp">
<ClCompile Include="src\scenes\startupScene.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\computervision\VideoCapture.cpp">
<ClCompile Include="src\scenes\inGameScene.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
@@ -113,22 +113,17 @@
<ClInclude Include="src\computervision\BackgroundRemover.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\computervision\OpenPoseVideo.h">
<ClInclude Include="src\scenes\scene.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\computervision\async\async_arm_detection.h">
<ClInclude Include="src\scenes\startupScene.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\computervision\VideoCapture.h">
<ClInclude Include="src\scenes\inGameScene.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
</Project>