[ADD] added all the files

Jasper committed 2021-05-28 12:27:12 +02:00
parent f1f1aac93d
commit 078a6ce66d
8 changed files with 5383 additions and 5 deletions

.gitignore

@@ -428,4 +428,6 @@ FodyWeavers.xsd
**/docs/*
**/doc/*
+**/pose_iter_160000.caffemodel
# End of https://www.toptal.com/developers/gitignore/api/c++,visualstudio,visualstudiocode,opencv

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -11,7 +11,7 @@
namespace computervision
{
-cv::VideoCapture cap(0);
+cv::VideoCapture cap(1);
cv::Mat img, imgGray, img2, img2Gray, img3, img4;
@@ -59,10 +59,10 @@ namespace computervision
putText(cameraFrame,hand_text, Point(10, 75), FONT_HERSHEY_PLAIN, 2.0, Scalar(255, 0, 255),3);
imshow("camera", cameraFrame);
-/* imshow("output", frameOut);
+imshow("output", frameOut);
imshow("foreground", foreground);
imshow("handMask", handMask);
-imshow("handDetection", fingerCountDebug);*/
+imshow("handDetection", fingerCountDebug);
int key = waitKey(1);


@@ -0,0 +1,149 @@
#include "OpenPoseImage.h"
using namespace std;
using namespace cv;
using namespace cv::dnn;
namespace computervision
{
#define MPI
#ifdef MPI
const int POSE_PAIRS[14][2] =
{
{0,1}, {1,2}, {2,3},
{3,4}, {1,5}, {5,6},
{6,7}, {1,14}, {14,8}, {8,9},
{9,10}, {14,11}, {11,12}, {12,13}
};
// static: keeps these internal to this translation unit so they do not clash with the
// identically named globals in OpenPoseVideo.cpp
static string protoFile = "res/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt";
static string weightsFile = "res/pose/mpi/pose_iter_160000.caffemodel";
static int nPoints = 15;
#endif
#ifdef COCO
const int POSE_PAIRS[17][2] =
{
{1,2}, {1,5}, {2,3},
{3,4}, {5,6}, {6,7},
{1,8}, {8,9}, {9,10},
{1,11}, {11,12}, {12,13},
{1,0}, {0,14},
{14,16}, {0,15}, {15,17}
};
string protoFile = "pose/coco/pose_deploy_linevec.prototxt";
string weightsFile = "pose/coco/pose_iter_440000.caffemodel";
int nPoints = 18;
#endif
void OpenPoseImage::movementSkeleton(Mat inputImage) {
cout << "USAGE : ./OpenPose <imageFile> " << endl;
cout << "USAGE : ./OpenPose <imageFile> <device>" << endl;
string device = "cpu";
//string imageFile = "single.jpeg";
// Take arguments from command line
/* if (argc == 2)
{
if ((string)argv[1] == "gpu")
device = "gpu";
else
imageFile = argv[1];
}
else if (argc == 3)
{
imageFile = argv[1];
if ((string)argv[2] == "gpu")
device = "gpu";
}*/
int inWidth = 368;
int inHeight = 368;
float thresh = 0.1;
Mat frame = inputImage;
Mat frameCopy = frame.clone();
int frameWidth = frame.cols;
int frameHeight = frame.rows;
double t = (double)cv::getTickCount();
Net net = readNetFromCaffe(protoFile, weightsFile);
if (device == "cpu")
{
cout << "Using CPU device" << endl;
// setPreferableBackend expects a backend id; pair DNN_BACKEND_OPENCV with the CPU target
net.setPreferableBackend(DNN_BACKEND_OPENCV);
net.setPreferableTarget(DNN_TARGET_CPU);
}
else if (device == "gpu")
{
cout << "Using GPU device" << endl;
net.setPreferableBackend(DNN_BACKEND_CUDA);
net.setPreferableTarget(DNN_TARGET_CUDA);
}
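// Build a 368x368 input blob: pixel values scaled to [0,1], no mean subtraction, no channel swap, no crop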
Mat inpBlob = blobFromImage(frame, 1.0 / 255, Size(inWidth, inHeight), Scalar(0, 0, 0), false, false);
net.setInput(inpBlob);
Mat output = net.forward();
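// output is a 4-D blob (1 x channels x H x W); the first nPoints channels are the key-point confidence maps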
int H = output.size[2];
int W = output.size[3];
// find the position of the body parts
vector<Point> points(nPoints);
for (int n = 0; n < nPoints; n++)
{
// Probability map of the corresponding body part.
Mat probMap(H, W, CV_32F, output.ptr(0, n));
Point2f p(-1, -1);
Point maxLoc;
double prob;
minMaxLoc(probMap, 0, &prob, 0, &maxLoc);
if (prob > thresh)
{
p = maxLoc;
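// map the peak from heat-map coordinates back to the original frame resolution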
p.x *= (float)frameWidth / W;
p.y *= (float)frameHeight / H;
circle(frameCopy, cv::Point((int)p.x, (int)p.y), 8, Scalar(0, 255, 255), -1);
cv::putText(frameCopy, cv::format("%d", n), cv::Point((int)p.x, (int)p.y), cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(0, 0, 255), 2);
}
points[n] = p;
}
int nPairs = sizeof(POSE_PAIRS) / sizeof(POSE_PAIRS[0]);
for (int n = 0; n < nPairs; n++)
{
// lookup 2 connected body/hand parts
Point2f partA = points[POSE_PAIRS[n][0]];
Point2f partB = points[POSE_PAIRS[n][1]];
if (partA.x <= 0 || partA.y <= 0 || partB.x <= 0 || partB.y <= 0)
continue;
line(frame, partA, partB, Scalar(0, 255, 255), 8);
circle(frame, partA, 8, Scalar(0, 0, 255), -1);
circle(frame, partB, 8, Scalar(0, 0, 255), -1);
}
t = ((double)cv::getTickCount() - t) / cv::getTickFrequency();
cout << "Time Taken = " << t << endl;
imshow("Output-Keypoints", frameCopy);
imshow("Output-Skeleton", frame);
imwrite("Output-Skeleton.jpg", frame);
}
}
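A minimal usage sketch for the class above, for reference only: OpenPoseImage.h itself is not part of this diff, so the include path, the class interface, and the "single.jpeg" file name are assumptions (mirroring OpenPoseVideo.h, main.cpp, and the commented-out imageFile above). The model files must be reachable at res/pose/mpi/ relative to the working directory.

#include <opencv2/opencv.hpp>
#include "computervision/OpenPoseImage.h"    // assumed header, mirroring OpenPoseVideo.h

int main()
{
    cv::Mat img = cv::imread("single.jpeg"); // placeholder path; any readable image works
    if (img.empty())
        return 1;

    computervision::OpenPoseImage openPoseImage;
    openPoseImage.movementSkeleton(img);     // runs the MPI model, shows key points and skeleton
    cv::waitKey(0);                          // keep the output windows open
    return 0;
}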


@@ -0,0 +1,144 @@
#include "OpenPoseVideo.h"
using namespace std;
using namespace cv;
using namespace cv::dnn;
namespace computervision
{
#define MPI
#ifdef MPI
const int POSE_PAIRS[7][2] =
{
{0,1}, {1,2}, {2,3},
{3,4}, {1,5}, {5,6},
{6,7}
};
// static: keeps these internal to this translation unit so they do not clash with the
// identically named globals in OpenPoseImage.cpp
static string protoFile = "res/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt";
static string weightsFile = "res/pose/mpi/pose_iter_160000.caffemodel";
static int nPoints = 8;
#endif
#ifdef COCO
const int POSE_PAIRS[17][2] =
{
{1,2}, {1,5}, {2,3},
{3,4}, {5,6}, {6,7},
{1,8}, {8,9}, {9,10},
{1,11}, {11,12}, {12,13},
{1,0}, {0,14},
{14,16}, {0,15}, {15,17}
};
string protoFile = "pose/coco/pose_deploy_linevec.prototxt";
string weightsFile = "pose/coco/pose_iter_440000.caffemodel";
int nPoints = 18;
#endif
Net net;
void OpenPoseVideo::setup() {
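// load the Caffe network once here so it is not re-read from disk on every frame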
net = readNetFromCaffe(protoFile, weightsFile);
}
void OpenPoseVideo::movementSkeleton(Mat inputImage) {
//string device = "cpu";
//string videoFile = "sample_video.mp4";
// Take arguments from command line
/*if (argc == 2)
{
if ((string)argv[1] == "gpu")
device = "gpu";
else
videoFile = argv[1];
}
else if (argc == 3)
{
videoFile = argv[1];
if ((string)argv[2] == "gpu")
device = "gpu";
}*/
int inWidth = 368;
int inHeight = 368;
float thresh = 0.01;
Mat frame, frameCopy;
int frameWidth = inputImage.size().width;
int frameHeight = inputImage.size().height;
/*if (device == "cpu")
{
cout << "Using CPU device" << endl;
net.setPreferableBackend(DNN_TARGET_CPU);
}
else if (device == "gpu")
{
cout << "Using GPU device" << endl;
net.setPreferableBackend(DNN_BACKEND_CUDA);
net.setPreferableTarget(DNN_TARGET_CUDA);
}*/
double t = (double)cv::getTickCount();
frame = inputImage;
frameCopy = frame.clone();
Mat inpBlob = blobFromImage(frame, 1.0 / 255, Size(inWidth, inHeight), Scalar(0, 0, 0), false, false);
net.setInput(inpBlob);
Mat output = net.forward();
int H = output.size[2];
int W = output.size[3];
// find the position of the body parts
vector<Point> points(nPoints);
for (int n = 0; n < nPoints; n++)
{
// Probability map of the corresponding body part.
Mat probMap(H, W, CV_32F, output.ptr(0, n));
Point2f p(-1, -1);
Point maxLoc;
double prob;
minMaxLoc(probMap, 0, &prob, 0, &maxLoc);
if (prob > thresh)
{
p = maxLoc;
p.x *= (float)frameWidth / W;
p.y *= (float)frameHeight / H;
circle(frameCopy, cv::Point((int)p.x, (int)p.y), 8, Scalar(0, 255, 255), -1);
cv::putText(frameCopy, cv::format("%d", n), cv::Point((int)p.x, (int)p.y), cv::FONT_HERSHEY_COMPLEX, 1.1, cv::Scalar(0, 0, 255), 2);
}
points[n] = p;
}
int nPairs = sizeof(POSE_PAIRS) / sizeof(POSE_PAIRS[0]);
for (int n = 0; n < nPairs; n++)
{
// lookup 2 connected body/hand parts
Point2f partA = points[POSE_PAIRS[n][0]];
Point2f partB = points[POSE_PAIRS[n][1]];
if (partA.x <= 0 || partA.y <= 0 || partB.x <= 0 || partB.y <= 0)
continue;
line(frame, partA, partB, Scalar(0, 255, 255), 8);
circle(frame, partA, 8, Scalar(0, 0, 255), -1);
circle(frame, partB, 8, Scalar(0, 0, 255), -1);
}
t = ((double)cv::getTickCount() - t) / cv::getTickFrequency();
cv::putText(frame, cv::format("time taken = %.2f sec", t), cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, .8, cv::Scalar(255, 50, 0), 2);
// imshow("Output-Keypoints", frameCopy);
imshow("Output-Skeleton", frame);
}
}


@@ -0,0 +1,19 @@
#pragma once
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace cv;
namespace computervision
{
class OpenPoseVideo{
private:
public:
void movementSkeleton(Mat inputImage);
void setup();
};
}
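A minimal usage sketch of this class, for reference: it mirrors the wiring in main.cpp below (setup() once, then movementSkeleton() per frame); the standalone main(), camera index 0, and the ESC key handling are assumptions.

#include <opencv2/opencv.hpp>
#include "computervision/OpenPoseVideo.h"

int main()
{
    computervision::OpenPoseVideo openPoseVideo;
    openPoseVideo.setup();                       // load the Caffe model once

    cv::VideoCapture cap(0);                     // camera index is an assumption
    cv::Mat frame;
    while (cap.read(frame))
    {
        openPoseVideo.movementSkeleton(frame);   // draws the skeleton into "Output-Skeleton"
        if (cv::waitKey(1) == 27)                // ESC quits
            break;
    }
    return 0;
}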


@@ -18,6 +18,8 @@
#include "toolbox/toolbox.h"
#include "computervision/ObjectDetection.h"
+//#include "computervision/OpenPoseImage.h"
+#include "computervision/OpenPoseVideo.h"
#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32s.lib")
@@ -64,12 +66,17 @@ int main(void)
// create object detection object instance
computervision::ObjectDetection objDetect;
+//computervision::OpenPoseImage openPoseImage;
+computervision::OpenPoseVideo openPoseVideo;
// set up object detection
//objDetect.setup();
cv::Mat cameraFrame;
+openPoseVideo.setup();
// Main game loop
while (!glfwWindowShouldClose(window))
{
@@ -87,8 +94,8 @@ int main(void)
render_engine::renderer::Render(entity, shader);
cameraFrame = objDetect.readCamera();
-objDetect.detectHand(cameraFrame);
+//objDetect.detectHand(cameraFrame);
+openPoseVideo.movementSkeleton(cameraFrame);
// Finish up
shader.Stop();