Compare commits

6 Commits

Author              SHA1        Message                              Date
Sem van der Hoeven  612adf6e9b  [EDIT] mutex?                        2021-05-31 15:50:06 +02:00
Sem van der Hoeven  25c99abb72  [EDIT] mutex with cap                2021-05-31 10:25:24 +02:00
Jasper              9d80cddbd1  [BUG] image still doesnt work        2021-05-28 16:49:13 +02:00
Sem van der Hoeven  40529f84b3  [ADD] basis for async arm detection  2021-05-28 15:31:21 +02:00
Jasper              a68c6a57bf  [EDIT] edited file                   2021-05-28 12:32:10 +02:00
Jasper              078a6ce66d  [ADD] added all the files            2021-05-28 12:27:12 +02:00
15 changed files with 5369 additions and 106 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,25 +0,0 @@
#include "MenuTest.h"
#include <iostream>
namespace computervision
{
int menu_item_array[4] = { 1, 2, 3, 4 };
float item_number = 0;
MenuTest::MenuTest(void) {
}
int MenuTest::GetMenuItem(bool hand_state) {
item_number += 0.20f;
int temp_item_number = item_number;
//If temp_item_number is equal to the size of the array, set item_number back to zero to loop through the array again
if (temp_item_number == sizeof(menu_item_array) / sizeof(menu_item_array[0])) {
item_number = 0;
}
return menu_item_array[temp_item_number];
}
}
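
The removed GetMenuItem above steps a float accumulator and indexes the array with it, but when temp_item_number reaches the array length the code resets item_number and still returns menu_item_array[temp_item_number], one slot past the end. A minimal bounds-safe sketch of the same cycling idea, with hypothetical names, not the project's code:

```cpp
#include <cstddef>

// Hypothetical bounds-safe variant of the removed GetMenuItem logic:
// advance a fractional position, but wrap it before indexing so the
// array access can never run one past the end.
int GetMenuItemSafe(float& position, const int* items, std::size_t count)
{
    position += 0.20f;                              // same step as the original
    std::size_t index = static_cast<std::size_t>(position);
    if (index >= count)                             // wrap before indexing
    {
        position = 0.0f;
        index = 0;
    }
    return items[index];
}
```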

View File

@@ -1,18 +0,0 @@
namespace computervision
{
class MenuTest {
public:
/**
* @brief Constructor for the class MenuTest, loads in array with menu items
*
*/
MenuTest(void);
/**
* @brief Returns the item number from the array
*
* @param input_bool is either true or false, depending on the recognized hand gesture
*/
int GetMenuItem(bool input_bool);
};
}

View File

@@ -1,17 +1,15 @@
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video.hpp>
#include "ObjectDetection.h"
#include "BackgroundRemover.h"
#include "SkinDetector.h"
#include "FaceDetector.h"
#include "FingerCount.h"
#include "VideoCapture.h"
using namespace videocapture;
namespace computervision
{
cv::VideoCapture cap(0);
cv::Mat img, imgGray, img2, img2Gray, img3, img4;
@@ -29,7 +27,12 @@ namespace computervision
}
cv::Mat ObjectDetection::readCamera() {
cap.read(img);
/*videocapture::getMutex()->lock();
videocapture::getCap().read(img);
videocapture::getMutex()->unlock();*/
img = videocapture::readFrame();
return img;
}
@@ -76,8 +79,13 @@ namespace computervision
void ObjectDetection::calculateDifference()
{
cap.read(img);
cap.read(img2);
//videocapture::getMutex()->lock();
//videocapture::getCap().read(img);
//videocapture::getCap().read(img2);
//videocapture::getMutex()->unlock()
img = videocapture::readFrame();
img2 = videocapture::readFrame();
cv::cvtColor(img, imgGray, cv::COLOR_RGBA2GRAY);
cv::cvtColor(img2, img2Gray, cv::COLOR_RGBA2GRAY);
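
The hunks above replace direct reads from the local cv::VideoCapture cap(0) with videocapture::readFrame(), so every camera access goes through the shared, mutex-guarded capture. A small consumer-side sketch of that pattern (illustrative only, using a hypothetical grabGrayFrame helper on top of the API shown in this diff):

```cpp
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "VideoCapture.h"

namespace computervision
{
    // Illustrative helper: consumers never own their own cv::VideoCapture;
    // each frame comes from the shared, mutex-guarded reader.
    cv::Mat grabGrayFrame()
    {
        cv::Mat frame = videocapture::readFrame();          // locked read + clone
        cv::Mat gray;
        if (!frame.empty())
            cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY);  // camera frames are BGR
        return gray;
    }
}
```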

View File

@@ -65,6 +65,8 @@ namespace computervision
*/
bool drawHandMaskRect(cv::Mat *input);
cv::VideoCapture getCap();
};

View File

@@ -0,0 +1,111 @@
#include "OpenPoseVideo.h"
using namespace std;
using namespace cv;
using namespace cv::dnn;
namespace computervision
{
#define MPI
#ifdef MPI
const int POSE_PAIRS[7][2] =
{
{0,1}, {1,2}, {2,3},
{3,4}, {1,5}, {5,6},
{6,7}
};
string protoFile = "res/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt";
string weightsFile = "res/pose/mpi/pose_iter_160000.caffemodel";
int nPoints = 8;
#endif
#ifdef COCO
const int POSE_PAIRS[17][2] =
{
{1,2}, {1,5}, {2,3},
{3,4}, {5,6}, {6,7},
{1,8}, {8,9}, {9,10},
{1,11}, {11,12}, {12,13},
{1,0}, {0,14},
{14,16}, {0,15}, {15,17}
};
string protoFile = "pose/coco/pose_deploy_linevec.prototxt";
string weightsFile = "pose/coco/pose_iter_440000.caffemodel";
int nPoints = 18;
#endif
Net net;
int inWidth = 368;
int inHeight = 368;
float thresh = 0.01;
void OpenPoseVideo::setup() {
net = readNetFromCaffe(protoFile, weightsFile);
}
cv::Mat OpenPoseVideo::getBlobFromImage(cv::Mat inputImage)
{
Mat frame;
int frameWidth = inputImage.size().width;
int frameHeight = inputImage.size().height;
double t = (double)cv::getTickCount();
std::cout << "reading input image and blob" << std::endl;
frame = inputImage.clone();
Mat inpBlob = blobFromImage(frame, 1.0 / 255, Size(inWidth, inHeight), Scalar(0, 0, 0), false, false);
return inpBlob;
}
void OpenPoseVideo::movementSkeleton(Mat inputImage, Mat inpBlob, std::function<void(std::vector<Point>)> f) {
std::cout << "movement skeleton start" << std::endl;
int frameWidth = inputImage.size().width;
int frameHeight = inputImage.size().height;
std::cout << "done reading image and blob" << std::endl;
net.setInput(inpBlob);
std::cout << "done setting input to net" << std::endl;
Mat output = net.forward();
int H = output.size[2];
int W = output.size[3];
std::cout << "about to find position of boxy parts" << std::endl;
// find the position of the body parts
vector<Point> points(nPoints);
for (int n = 0; n < nPoints; n++)
{
// Probability map of corresponding body's part.
Mat probMap(H, W, CV_32F, output.ptr(0, n));
Point2f p(-1, -1);
Point maxLoc;
double prob;
minMaxLoc(probMap, 0, &prob, 0, &maxLoc);
if (prob > thresh)
{
p = maxLoc;
p.x *= (float)frameWidth / W;
p.y *= (float)frameHeight / H;
/*circle(frame, cv::Point((int)p.x, (int)p.y), 8, Scalar(0, 255, 255), -1);
cv::putText(frame, cv::format("%d", n), cv::Point((int)p.x, (int)p.y), cv::FONT_HERSHEY_COMPLEX, 1.1, cv::Scalar(0, 0, 255), 2);*/
}
points[n] = p;
}
//cv::putText(frame, cv::format("time taken = %.2f sec", t), cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, .8, cv::Scalar(255, 50, 0), 2);
// imshow("Output-Keypoints", frameCopy);
/*imshow("Output-Skeleton", frame);*/
std::cout << "about to call points receiving method" << std::endl;
f(points);
}
}
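
The new OpenPoseVideo runs the Caffe pose model once per call: the network output is a stack of per-keypoint heat maps of size H x W, and for each of the nPoints parts the code takes the location of the strongest response (minMaxLoc) and rescales it from heat-map coordinates back to frame coordinates, leaving (-1,-1) for parts below the confidence threshold. A minimal caller sketch under the assumptions visible in this diff (MPI model files under res/pose/mpi/, frames coming from videocapture::readFrame):

```cpp
#include <iostream>
#include <vector>
#include <opencv2/core.hpp>
#include "OpenPoseVideo.h"
#include "VideoCapture.h"

int main()
{
    computervision::OpenPoseVideo pose;
    pose.setup();                                  // loads the MPI prototxt + caffemodel

    cv::Mat frame = videocapture::readFrame();     // one mutex-guarded camera frame
    cv::Mat blob  = pose.getBlobFromImage(frame);  // 368x368 blob, pixel values scaled to [0,1]

    // The callback receives one cv::Point per body part; (-1,-1) marks
    // keypoints whose heat-map peak fell below the confidence threshold.
    pose.movementSkeleton(frame, blob, [](std::vector<cv::Point> points) {
        int part = 0;
        for (const cv::Point& p : points)
            std::cout << "part " << part++ << ": " << p << std::endl;
    });
    return 0;
}
```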

View File

@@ -0,0 +1,20 @@
#pragma once
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace cv;
namespace computervision
{
class OpenPoseVideo{
private:
public:
cv::Mat getBlobFromImage(cv::Mat inputImage);
void movementSkeleton(Mat inputImage, Mat inpBlob, std::function<void(std::vector<Point>)> f);
void setup();
};
}

View File

@@ -0,0 +1,33 @@
#include "VideoCapture.h"
#include <mutex>
#include <iostream>
namespace videocapture{
static cv::VideoCapture cap(0);
static std::mutex mtx;
cv::VideoCapture getCap() {
cap.release();
return cap;
}
cv::Mat readFrame()
{
std::cout << "reading frame" << std::endl;
cv::Mat camFrame, videoFrame;
mtx.lock();
bool res = cap.read(camFrame);
std::cout << (res ? "reading worked" : "reading failed") << std::endl;
videoFrame = camFrame.clone();
mtx.unlock();
return videoFrame;
}
std::mutex* getMutex()
{
return &mtx;
}
}
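
readFrame above locks and unlocks the mutex by hand, so an exception between lock() and unlock() would leave the mutex held. A small sketch of the same read using std::lock_guard (RAII) instead; readFrameGuarded and its parameters are hypothetical, the shared capture and mutex in the file stay as they are:

```cpp
#include <mutex>
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>

namespace videocapture
{
    // Same idea as readFrame(), but the lock is released automatically when
    // the guard goes out of scope, even if read() or clone() throws.
    cv::Mat readFrameGuarded(cv::VideoCapture& cap, std::mutex& mtx)
    {
        std::lock_guard<std::mutex> guard(mtx);
        cv::Mat frame;
        cap.read(frame);          // serialized access to the shared capture device
        return frame.clone();     // deep copy so the caller owns the pixels
    }
}
```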

View File

@@ -0,0 +1,12 @@
#pragma once
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video.hpp>
#include <mutex>
namespace videocapture {
cv::VideoCapture getCap();
std::mutex* getMutex();
cv::Mat readFrame();
}

View File

@@ -0,0 +1,41 @@
#include <iostream>
#include "async_arm_detection.h"
#include "../OpenPoseVideo.h"
#include <thread>
#include "../VideoCapture.h"
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
namespace computervision
{
AsyncArmDetection::AsyncArmDetection()
{
}
void AsyncArmDetection::run_arm_detection()
{
}
void AsyncArmDetection::start(std::function<void(std::vector<Point>)> points_ready_func, OpenPoseVideo op)
{
auto lambda = [](cv::Mat img, std::function<void(std::vector<Point>)> f, OpenPoseVideo op, cv::Mat inpBlob) {
std::cout << "STARTING THREAD LAMBDA" << std::endl;
//imshow("image", img); 255, Size(368, 368), Scalar(0, 0, 0), false, false);
op.movementSkeleton(img, inpBlob, f);
//}
};
cv::Mat img = videocapture::readFrame();
std::cout << "starting function" << std::endl;
cv::Mat inpBlob = op.getBlobFromImage(videocapture::readFrame());
std::thread async_arm_detect_thread(lambda, img, points_ready_func, op, inpBlob);
}
}
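
In AsyncArmDetection::start above, async_arm_detect_thread is a local std::thread that goes out of scope at the end of the function while still joinable, which makes its destructor call std::terminate. A hedged sketch of one way around that, keeping the worker as a member and joining it on destruction; the class and member names here are hypothetical, not part of the diff:

```cpp
#include <functional>
#include <thread>
#include <vector>
#include <opencv2/core.hpp>

namespace computervision
{
    // Hypothetical variant: the worker outlives start() as a member and is
    // joined in the destructor, instead of letting a local std::thread be
    // destroyed while joinable (which terminates the program).
    // In this sketch, start() is meant to be called at most once per object.
    class AsyncArmDetectionSketch
    {
    public:
        void start(std::function<void(std::vector<cv::Point>)> on_points)
        {
            worker_ = std::thread([on_points]() {
                // ... grab a frame, run the pose network, then:
                on_points({});                 // deliver the (here: empty) result
            });
        }

        ~AsyncArmDetectionSketch()
        {
            if (worker_.joinable())
                worker_.join();                // never destroy a joinable thread
        }

    private:
        std::thread worker_;
    };
}
```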

View File

@@ -0,0 +1,22 @@
#pragma once
#include <vector>
#include <opencv2/core/types.hpp>
#include <opencv2/videoio.hpp>
#include <functional>
#include "../OpenPoseVideo.h"
namespace computervision
{
class AsyncArmDetection
{
public:
AsyncArmDetection(void);
void start(std::function<void(std::vector<cv::Point>)>, computervision::OpenPoseVideo op);
private:
void run_arm_detection();
};
}

View File

@@ -1,12 +1,12 @@
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/gtc/matrix_transform.hpp>
#include <functional>
#include <vector>
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
#include <ostream>
#include <stdlib.h>
#include <iostream>
#include <Windows.h>
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
@@ -19,8 +19,11 @@
#include "shaders/static_shader.h"
#include "toolbox/toolbox.h"
#include "computervision/MenuTest.h"
#include "computervision/ObjectDetection.h"
//#include "computervision/OpenPoseImage.h"
#include "computervision/OpenPoseVideo.h"
#include "computervision/async/async_arm_detection.h"
#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32s.lib")
@@ -29,7 +32,15 @@
static double UpdateDelta();
static GLFWwindow* window;
int chosen_item = 0;
computervision::AsyncArmDetection as;
computervision::OpenPoseVideo openPoseVideo;
void retrieve_points(std::vector<Point> arm_points)
{
std::cout << "got points!!" << std::endl;
std::cout << "points: " << arm_points << std::endl;
as.start(retrieve_points, openPoseVideo);
}
int main(void)
{
@@ -67,12 +78,22 @@ int main(void)
// create object detection object instance
computervision::ObjectDetection objDetect;
//computervision::OpenPoseImage openPoseImage;
openPoseVideo.setup();
// set up object detection
//objDetect.setup();
cv::Mat cameraFrame;
//openPoseVideo.setup();
as.start(retrieve_points, openPoseVideo);
// Main game loop
while (!glfwWindowShouldClose(window))
{
@@ -90,54 +111,7 @@ int main(void)
render_engine::renderer::Render(entity, shader);
cameraFrame = objDetect.readCamera();
////////////////////////// KIMS SHIT ////////////////////////////////////
computervision::MenuTest menu_test;
//Get hand state from camera
bool hand_detection = objDetect.detectHand(cameraFrame);
if (hand_detection)
{
std::cout << "hand is opened" << std::endl;
//Loop through menu items
chosen_item = menu_test.GetMenuItem(true);
//For debug only, to see if chosen item is selected properly when hand is opened
std::cout << "chosen item: " << chosen_item << std::endl;
}
else if (!hand_detection)
{
//for debug only, to see if the chosen item is selected properly when hand is closed
std::cout << "hand is closed" << std::endl;
//std::cout << "item to start: " << chosen_item << std::endl;
//TODO link chosen item to the correct game states
switch (chosen_item)
{
case 1:
//Game state 0
std::cout << "in case: " << chosen_item << std::endl;
break;
case 2:
//Game state 1
std::cout << "in case: " << chosen_item << std::endl;
break;
case 3:
//Game state 2
std::cout << "in case: " << chosen_item << std::endl;
break;
case 4:
//Game state 3
std::cout << "in case: " << chosen_item << std::endl;
default:
break;
}
}
///////////////////////// END OF KIMS SHIT ///////////////////////////////
//objDetect.detectHand(cameraFrame);
// Finish up
shader.Stop();
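
retrieve_points above re-arms detection from inside its own callback, calling as.start(retrieve_points, openPoseVideo) every time a set of points arrives, so each result spawns the next detection round. A standalone sketch of that self-rescheduling pattern with stand-in names (start_detection and on_points are hypothetical, and the detached thread plus the final sleep are assumptions made for the example):

```cpp
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Stand-in for AsyncArmDetection::start: run the "detection" on its own
// thread and hand the result to the callback when it is done.
void start_detection(const std::function<void(int)>& on_result, int value)
{
    std::thread([on_result, value]() {
        std::this_thread::sleep_for(std::chrono::milliseconds(10)); // pretend inference
        on_result(value);
    }).detach();
}

// Self-rescheduling callback, like retrieve_points in main.cpp: every
// result immediately arms the next detection round.
void on_points(int value)
{
    std::cout << "got result " << value << std::endl;
    if (value > 0)
        start_detection(on_points, value - 1);
}

int main()
{
    start_detection(on_points, 3);
    std::this_thread::sleep_for(std::chrono::seconds(1)); // crude wait for the chain to finish
    return 0;
}
```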

View File

@@ -19,12 +19,14 @@
</ProjectConfiguration>
</ItemGroup>
<ItemGroup>
<ClCompile Include="src\computervision\async\async_arm_detection.cpp" />
<ClCompile Include="src\computervision\FaceDetector.cpp" />
<ClCompile Include="src\computervision\MenuTest.cpp" />
<ClCompile Include="src\computervision\ObjectDetection.cpp" />
<ClCompile Include="src\computervision\OpenPoseVideo.cpp" />
<ClCompile Include="src\computervision\SkinDetector.cpp" />
<ClCompile Include="src\computervision\FingerCount.cpp" />
<ClCompile Include="src\computervision\BackgroundRemover.cpp" />
<ClCompile Include="src\computervision\VideoCapture.cpp" />
<ClCompile Include="src\entities\camera.cpp" />
<ClCompile Include="src\entities\entity.cpp" />
<ClCompile Include="src\main.cpp" />
@@ -36,12 +38,14 @@
<ClCompile Include="src\toolbox\toolbox.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="src\computervision\async\async_arm_detection.h" />
<ClInclude Include="src\computervision\FaceDetector.h" />
<ClInclude Include="src\computervision\FingerCount.h" />
<ClInclude Include="src\computervision\BackgroundRemover.h" />
<ClInclude Include="src\computervision\MenuTest.h" />
<ClInclude Include="src\computervision\OpenPoseVideo.h" />
<ClInclude Include="src\computervision\SkinDetector.h" />
<ClInclude Include="src\computervision\ObjectDetection.h" />
<ClInclude Include="src\computervision\VideoCapture.h" />
<ClInclude Include="src\entities\camera.h" />
<ClInclude Include="src\entities\entity.h" />
<ClInclude Include="src\models\model.h" />
@@ -56,6 +60,11 @@
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
<PropertyGroup Label="Globals">
<VCProjectVersion>16.0</VCProjectVersion>
<ProjectGuid>{A7ECF1BE-DB22-4BF7-BFF6-E3BF72691EE6}</ProjectGuid>

View File

@@ -57,7 +57,13 @@
<ClCompile Include="src\computervision\BackgroundRemover.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\computervision\MenuTest.cpp">
<ClCompile Include="src\computervision\OpenPoseVideo.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\computervision\async\async_arm_detection.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="src\computervision\VideoCapture.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
@@ -107,11 +113,22 @@
<ClInclude Include="src\computervision\BackgroundRemover.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\computervision\MenuTest.h">
<ClInclude Include="src\computervision\OpenPoseVideo.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\computervision\async\async_arm_detection.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="src\computervision\VideoCapture.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
<ItemGroup>
<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
</ItemGroup>
</Project>