[ADD] basis for async arm detection

Sem van der Hoeven
2021-05-28 15:31:21 +02:00
parent a68c6a57bf
commit 40529f84b3
9 changed files with 119 additions and 56 deletions

src\computervision\ObjectDetection.cpp View File

@@ -11,7 +11,7 @@
namespace computervision
{
-cv::VideoCapture cap(1);
+cv::VideoCapture cap(0);
cv::Mat img, imgGray, img2, img2Gray, img3, img4;
@@ -33,6 +33,11 @@ namespace computervision
return img;
}
+cv::VideoCapture ObjectDetection::getCap()
+{
+return cap;
+}
bool ObjectDetection::detectHand(Mat cameraFrame)
{
Mat inputFrame = generateHandMaskSquare(cameraFrame);
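
The capture index switches from 1 to 0 (the default camera), and the new getCap() accessor exposes ObjectDetection's cv::VideoCapture so other components can reuse the same device instead of opening it twice. A minimal sketch of reusing it, relying on cv::VideoCapture copies sharing their backend through cv::Ptr (the variable names below are illustrative, not part of the commit):

    computervision::ObjectDetection objDetect;
    cv::VideoCapture sharedCap = objDetect.getCap(); // shares the capture already opened by ObjectDetection
    cv::Mat frame;
    if (sharedCap.isOpened() && sharedCap.read(frame))
    {
        // hand the frame to the arm detector without opening a second camera handle
    }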

src\computervision\ObjectDetection.h View File

@@ -65,6 +65,8 @@ namespace computervision
*/
bool drawHandMaskRect(cv::Mat *input);
+cv::VideoCapture getCap();
};

src\computervision\OpenPoseVideo.cpp View File

@@ -44,59 +44,34 @@ namespace computervision
net = readNetFromCaffe(protoFile, weightsFile);
}
-void OpenPoseVideo::movementSkeleton(Mat inputImage) {
-//string device = "cpu";
-//string videoFile = "sample_video.mp4";
-// Take arguments from commmand line
-/*if (argc == 2)
-{
-if ((string)argv[1] == "gpu")
-device = "gpu";
-else
-videoFile = argv[1];
-}
-else if (argc == 3)
-{
-videoFile = argv[1];
-if ((string)argv[2] == "gpu")
-device = "gpu";
-}*/
+void OpenPoseVideo::movementSkeleton(Mat inputImage, std::function<void(std::vector<Point>)> f) {
+std::cout << "movement skeleton start" << std::endl;
int inWidth = 368;
int inHeight = 368;
float thresh = 0.01;
-Mat frame, frameCopy;
+Mat frame;
int frameWidth = inputImage.size().width;
int frameHeight = inputImage.size().height;
-/*if (device == "cpu")
-{
-cout << "Using CPU device" << endl;
-net.setPreferableBackend(DNN_TARGET_CPU);
-}
-else if (device == "gpu")
-{
-cout << "Using GPU device" << endl;
-net.setPreferableBackend(DNN_BACKEND_CUDA);
-net.setPreferableTarget(DNN_TARGET_CUDA);
-}*/
double t = (double)cv::getTickCount();
+std::cout << "reading input image and blob" << std::endl;
frame = inputImage;
-frameCopy = frame.clone();
Mat inpBlob = blobFromImage(frame, 1.0 / 255, Size(inWidth, inHeight), Scalar(0, 0, 0), false, false);
+std::cout << "done reading image and blob" << std::endl;
net.setInput(inpBlob);
+std::cout << "done setting input to net" << std::endl;
Mat output = net.forward();
int H = output.size[2];
int W = output.size[3];
+std::cout << "about to find position of body parts" << std::endl;
// find the position of the body parts
vector<Point> points(nPoints);
for (int n = 0; n < nPoints; n++)
@@ -114,31 +89,16 @@ namespace computervision
p.x *= (float)frameWidth / W;
p.y *= (float)frameHeight / H;
-circle(frameCopy, cv::Point((int)p.x, (int)p.y), 8, Scalar(0, 255, 255), -1);
-cv::putText(frameCopy, cv::format("%d", n), cv::Point((int)p.x, (int)p.y), cv::FONT_HERSHEY_COMPLEX, 1.1, cv::Scalar(0, 0, 255), 2);
+circle(frame, cv::Point((int)p.x, (int)p.y), 8, Scalar(0, 255, 255), -1);
+cv::putText(frame, cv::format("%d", n), cv::Point((int)p.x, (int)p.y), cv::FONT_HERSHEY_COMPLEX, 1.1, cv::Scalar(0, 0, 255), 2);
}
points[n] = p;
}
-int nPairs = sizeof(POSE_PAIRS) / sizeof(POSE_PAIRS[0]);
-for (int n = 0; n < nPairs; n++)
-{
-// lookup 2 connected body/hand parts
-Point2f partA = points[POSE_PAIRS[n][0]];
-Point2f partB = points[POSE_PAIRS[n][1]];
-if (partA.x <= 0 || partA.y <= 0 || partB.x <= 0 || partB.y <= 0)
-continue;
-line(frame, partA, partB, Scalar(0, 255, 255), 8);
-circle(frame, partA, 8, Scalar(0, 0, 255), -1);
-circle(frame, partB, 8, Scalar(0, 0, 255), -1);
-}
t = ((double)cv::getTickCount() - t) / cv::getTickFrequency();
cv::putText(frame, cv::format("time taken = %.2f sec", t), cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, .8, cv::Scalar(255, 50, 0), 2);
// imshow("Output-Keypoints", frameCopy);
imshow("Output-Skeleton", frame);
+std::cout << "about to call points receiving method" << std::endl;
+f(points);
}
}
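
With the extra std::function parameter, movementSkeleton no longer just draws the keypoints: it hands the detected points to a caller-supplied callback, which is what lets the detection run off the main thread. A minimal usage sketch, assuming an OpenPoseVideo instance named openPoseVideo that has already had setup() called and an opened cv::VideoCapture named cap (the lambda body is illustrative):

    cv::Mat frame;
    cap.read(frame); // grab a frame from the shared capture
    openPoseVideo.movementSkeleton(frame, [](std::vector<cv::Point> points) {
        std::cout << "received " << points.size() << " keypoints" << std::endl;
    });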

src\computervision\OpenPoseVideo.h View File

@@ -13,7 +13,7 @@ namespace computervision
private:
public:
-void movementSkeleton(Mat inputImage);
+void movementSkeleton(Mat inputImage,std::function<void(std::vector<Point>)> f);
void setup();
};
}

src\computervision\async\async_arm_detection.cpp View File

@@ -0,0 +1,43 @@
#include <iostream>
#include "async_arm_detection.h"
#include "../OpenPoseVideo.h"
#include <thread>
namespace computervision
{
AsyncArmDetection::AsyncArmDetection()
{
}
void AsyncArmDetection::run_arm_detection()
{
}
void AsyncArmDetection::start(std::function<void(std::vector<Point>)> points_ready_func, cv::VideoCapture cap, OpenPoseVideo op)
{
auto lambda = [](std::function<void(std::vector<Point>)> f, cv::VideoCapture c, OpenPoseVideo op) {
std::cout << "STARTING THREAD LAMBDA" << std::endl;
cv::VideoCapture cap(0);
if (!cap.isOpened())
{
std::cout << "error opening video" << std::endl;
return;
}
while (true)
{
Mat img;
cap.read(img);
op.movementSkeleton(img, f);
}
};
std::cout << "starting function" << std::endl;
std::thread async_arm_detect_thread(lambda, points_ready_func, cap, op);
}
}
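
Two details of start() are worth flagging: the cv::VideoCapture parameter c is shadowed by a second cv::VideoCapture cap(0) opened inside the lambda, and the local std::thread is destroyed while still joinable when start() returns, which calls std::terminate. A sketch of how the body of start() could look if the passed-in capture is reused and the thread is detached; this is an assumption about intent, not what the commit does:

    auto lambda = [](std::function<void(std::vector<cv::Point>)> f, cv::VideoCapture c, OpenPoseVideo op) {
        if (!c.isOpened()) // reuse the capture handed in via ObjectDetection::getCap()
        {
            std::cout << "error opening video" << std::endl;
            return;
        }
        while (true)
        {
            cv::Mat img;
            c.read(img);
            op.movementSkeleton(img, f);
        }
    };
    std::thread async_arm_detect_thread(lambda, points_ready_func, cap, op);
    async_arm_detect_thread.detach(); // or keep the thread (e.g. as a class member) and join it on shutdown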

src\computervision\async\async_arm_detection.h View File

@@ -0,0 +1,22 @@
#pragma once
#include <vector>
#include <opencv2/core/types.hpp>
#include <opencv2/videoio.hpp>
#include <functional>
#include "../OpenPoseVideo.h"
namespace computervision
{
class AsyncArmDetection
{
public:
AsyncArmDetection(void);
void start(std::function<void(std::vector<cv::Point>)>, cv::VideoCapture cap, computervision::OpenPoseVideo op);
private:
void run_arm_detection();
};
}

View File

@@ -1,6 +1,8 @@
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/gtc/matrix_transform.hpp>
+#include <functional>
+#include <vector>
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
#include <ostream>
@@ -21,6 +23,8 @@
//#include "computervision/OpenPoseImage.h"
#include "computervision/OpenPoseVideo.h"
+#include "computervision/async/async_arm_detection.h"
#pragma comment(lib, "glfw3.lib")
#pragma comment(lib, "glew32s.lib")
#pragma comment(lib, "opengl32.lib")
@@ -29,6 +33,11 @@ static double UpdateDelta();
static GLFWwindow* window;
+void retrieve_points(std::vector<Point> arm_points)
+{
+std::cout << "got points!!" << std::endl;
+std::cout << "points: " << arm_points << std::endl;
+}
int main(void)
{
@@ -68,6 +77,7 @@ int main(void)
computervision::ObjectDetection objDetect;
//computervision::OpenPoseImage openPoseImage;
computervision::OpenPoseVideo openPoseVideo;
+openPoseVideo.setup();
// set up object detection
@@ -75,7 +85,11 @@ int main(void)
cv::Mat cameraFrame;
-openPoseVideo.setup();
+//openPoseVideo.setup();
+computervision::AsyncArmDetection as;
+as.start(retrieve_points, objDetect.getCap(),openPoseVideo);
// Main game loop
while (!glfwWindowShouldClose(window))
@@ -95,7 +109,6 @@ int main(void)
cameraFrame = objDetect.readCamera();
//objDetect.detectHand(cameraFrame);
-openPoseVideo.movementSkeleton(cameraFrame);
// Finish up
shader.Stop();
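
Because retrieve_points is invoked on the detection thread rather than the thread running the game loop, any data it shares with rendering or game logic needs synchronization. A minimal sketch of publishing the latest keypoints behind a mutex (the variable names are illustrative, not part of the commit):

    #include <mutex>
    #include <vector>
    #include <opencv2/core/types.hpp>

    static std::mutex arm_points_mutex;
    static std::vector<cv::Point> latest_arm_points;

    void retrieve_points(std::vector<cv::Point> arm_points)
    {
        std::lock_guard<std::mutex> lock(arm_points_mutex);
        latest_arm_points = std::move(arm_points);
    }

    // in the game loop (main thread), copy the points out under the same lock:
    // std::vector<cv::Point> points_copy;
    // { std::lock_guard<std::mutex> lock(arm_points_mutex); points_copy = latest_arm_points; }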

View File

@@ -19,6 +19,7 @@
</ProjectConfiguration>
</ItemGroup>
<ItemGroup>
+<ClCompile Include="src\computervision\async\async_arm_detection.cpp" />
<ClCompile Include="src\computervision\FaceDetector.cpp" />
<ClCompile Include="src\computervision\ObjectDetection.cpp" />
<ClCompile Include="src\computervision\OpenPoseVideo.cpp" />
@@ -36,6 +37,7 @@
<ClCompile Include="src\toolbox\toolbox.cpp" />
</ItemGroup>
<ItemGroup>
+<ClInclude Include="src\computervision\async\async_arm_detection.h" />
<ClInclude Include="src\computervision\FaceDetector.h" />
<ClInclude Include="src\computervision\FingerCount.h" />
<ClInclude Include="src\computervision\BackgroundRemover.h" />
@@ -56,6 +58,11 @@
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
+<ItemGroup>
+<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
+<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
+<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
+</ItemGroup>
<PropertyGroup Label="Globals">
<VCProjectVersion>16.0</VCProjectVersion>
<ProjectGuid>{A7ECF1BE-DB22-4BF7-BFF6-E3BF72691EE6}</ProjectGuid>

View File

@@ -60,6 +60,9 @@
<ClCompile Include="src\computervision\OpenPoseVideo.cpp">
<Filter>Source Files</Filter>
</ClCompile>
+<ClCompile Include="src\computervision\async\async_arm_detection.cpp">
+<Filter>Source Files</Filter>
+</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="src\entities\Camera.h">
@@ -110,8 +113,16 @@
<ClInclude Include="src\computervision\OpenPoseVideo.h">
<Filter>Header Files</Filter>
</ClInclude>
+<ClInclude Include="src\computervision\async\async_arm_detection.h">
+<Filter>Header Files</Filter>
+</ClInclude>
</ItemGroup>
<ItemGroup>
<Xml Include="res\haarcascade_frontalface_alt.xml" />
</ItemGroup>
+<ItemGroup>
+<None Include="res\pose\coco\pose_deploy_linevec.prototxt" />
+<None Include="res\pose\mpi\pose_deploy_linevec_faster_4_stages.prototxt" />
+<None Include="res\pose\mpi\pose_iter_160000.caffemodel" />
+</ItemGroup>
</Project>