您的位置:首页 > 运维架构

kinect2.0 opencv3.2 骨骼图像提取

2017-09-15 16:37 357 查看
转载请注明出处:http://blog.csdn.net/mystylee/article/details/77993374
本人配置:win10 + visual studio 2015 + kinect 2.0 + opencv 3.2

说明:由于本人前面有一个桌子,没有读取到腿部膝盖以下的关节点。如果要测试,请到比较宽阔的地方。
另外,本人将右手关节点的坐标显示了出来。有兴趣的同学可以利用这一深度将其手部分割出来。我在后面的博客中会实现这一功能。

本文主要将彩色图像和人体对应的骨骼图像展现出来。具体代码如下所示:
#include <kinect.h>

#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/calib3d.hpp>

#include <cstdio>
#include <fstream>
#include <iostream>

using namespace std;

using namespace cv;

void    draw(Mat & img, Joint & r_1, Joint & r_2, ICoordinateMapper * myMapper);

HRESULT hr;

// Demo entry point: opens the default Kinect v2 sensor, shows the live color
// stream, draws the tracked skeleton on a depth-resolution canvas, logs the
// six hand-related joints to a text file, and saves one JPEG per body frame.
// Returns 0 on normal exit (ESC pressed) or a failing HRESULT on setup error.
int main()
{
    IKinectSensor* kinectsensor = nullptr;
    IBodyFrameSource* bodySource = nullptr;
    IDepthFrameSource* depthSource = nullptr;
    IColorFrameSource* colorSource = nullptr;
    IBodyFrameReader* bodyReader = nullptr;
    IDepthFrameReader* depthReader = nullptr;
    IColorFrameReader* colorReader = nullptr;
    ICoordinateMapper* coordinateMapper = nullptr;

    namedWindow("body");

    char file_name[20];   // per-frame snapshot file name
    int a = 0;            // snapshot counter

    // --- Open the sensor and its color / body / depth streams. -------------
    // A failure here aborts immediately; the original printed a message and
    // fell through into null-pointer dereferences.
    hr = GetDefaultKinectSensor(&kinectsensor);
    if (FAILED(hr))
    {
        return hr;
    }
    hr = kinectsensor->Open();
    if (FAILED(hr))
    {
        cout << "kinect open failed!" << endl;
        return hr;
    }
    hr = kinectsensor->get_ColorFrameSource(&colorSource);
    if (FAILED(hr))
    {
        cout << "colorsource failed!" << endl;
        return hr;
    }
    hr = kinectsensor->get_BodyFrameSource(&bodySource);
    if (FAILED(hr))
    {
        cout << "bodysource failed!" << endl;
        return hr;
    }
    hr = kinectsensor->get_DepthFrameSource(&depthSource);
    if (FAILED(hr))
    {
        cout << "depthsource failed!" << endl;
        return hr;
    }
    hr = colorSource->OpenReader(&colorReader);
    if (FAILED(hr))
    {
        cout << "colorreader failed!" << endl;
        return hr;
    }
    hr = bodySource->OpenReader(&bodyReader);
    if (FAILED(hr))
    {
        cout << "bodyreader failed!" << endl;
        return hr;
    }
    hr = depthSource->OpenReader(&depthReader);
    if (FAILED(hr))
    {
        cout << "depthreader failed!" << endl;
        return hr;
    }
    hr = kinectsensor->get_CoordinateMapper(&coordinateMapper);
    if (FAILED(hr))
    {
        cout << "coordinatemapper failed!" << endl;
        return hr;
    }

    // --- Query frame geometry. ---------------------------------------------
    int colorHeight = 0, colorWidth = 0;
    IFrameDescription* colorDescription = nullptr;
    colorSource->get_FrameDescription(&colorDescription);
    colorDescription->get_Height(&colorHeight);
    colorDescription->get_Width(&colorWidth);
    colorDescription->Release();

    int bodycount = 0;
    bodySource->get_BodyCount(&bodycount);

    int depthHeight = 0, depthWidth = 0;
    IFrameDescription* depthDescription = nullptr;
    depthSource->get_FrameDescription(&depthDescription);
    depthDescription->get_Height(&depthHeight);
    depthDescription->get_Width(&depthWidth);
    depthDescription->Release();   // the original leaked this description

    Mat original(depthHeight, depthWidth, CV_8UC3);   // skeleton canvas (depth resolution)
    Mat colorImg(colorHeight, colorWidth, CV_8UC4);   // BGRA color frame
    // Depth buffer kept for the follow-up hand-segmentation work mentioned in
    // the article. (The unused DepthSpacePoint array the original allocated
    // and leaked has been removed.)
    UINT16* depthData = new UINT16[depthHeight * depthWidth];
    IDepthFrame* depthFrame = nullptr;
    IColorFrame* colorFrame = nullptr;

    // Text buffers for on-screen joint coordinate labels.
    char handright[20];
    char handleft[20];
    char HandTipLeft[20];
    char ThumbLeft[20];
    char HandTipRight[20];
    char ThumbRight[20];

    // Hand-joint coordinates are appended to this log file (hard-coded path
    // preserved from the original sample).
    ofstream handpoint("D:\\visual studio2017\\work\\json\\Project2\\hand\\hand\\hand.txt");

    while (true)
    {
        original.setTo(0);

        // Busy-wait until a fresh color frame is available, then display it.
        while (colorReader->AcquireLatestFrame(&colorFrame) != S_OK);
        colorFrame->CopyConvertedFrameDataToArray(colorHeight * colorWidth * 4, colorImg.data, ColorImageFormat_Bgra);
        imshow("colorImg", colorImg);
        colorFrame->Release();

        // Same for the depth frame.
        while (depthReader->AcquireLatestFrame(&depthFrame) != S_OK);
        depthFrame->CopyFrameDataToArray(depthHeight * depthWidth, depthData);
        depthFrame->Release();

        IBodyFrame* bodyFrame = nullptr;
        hr = bodyReader->AcquireLatestFrame(&bodyFrame);   // acquire the body frame
        if (hr == S_OK)
        {
            IBody** bodyArr = new IBody*[bodycount];       // one slot per trackable body
            cout << "bodycount" << bodycount << endl;      // Kinect v2 reports 6
            for (int i = 0; i < bodycount; i++)
            {
                bodyArr[i] = nullptr;
            }

            if (bodyFrame->GetAndRefreshBodyData(bodycount, bodyArr) == S_OK)
            {
                for (int i = 0; i < bodycount; i++)
                {
                    BOOLEAN bodyTracked = false;
                    // Only draw bodies that are actually tracked.
                    if (bodyArr[i]->get_IsTracked(&bodyTracked) == S_OK && bodyTracked)
                    {
                        Joint joints[JointType_Count];
                        if (bodyArr[i]->GetJoints(JointType_Count, joints) == S_OK)
                        {
                            // Draw the skeleton bone by bone.
                            draw(original, joints[JointType_Head], joints[JointType_Neck], coordinateMapper);
                            draw(original, joints[JointType_Neck], joints[JointType_SpineShoulder], coordinateMapper);

                            draw(original, joints[JointType_SpineShoulder], joints[JointType_ShoulderLeft], coordinateMapper);
                            draw(original, joints[JointType_SpineShoulder], joints[JointType_SpineMid], coordinateMapper);
                            draw(original, joints[JointType_SpineShoulder], joints[JointType_ShoulderRight], coordinateMapper);

                            draw(original, joints[JointType_ShoulderLeft], joints[JointType_ElbowLeft], coordinateMapper);
                            draw(original, joints[JointType_SpineMid], joints[JointType_SpineBase], coordinateMapper);
                            draw(original, joints[JointType_ShoulderRight], joints[JointType_ElbowRight], coordinateMapper);

                            draw(original, joints[JointType_ElbowLeft], joints[JointType_WristLeft], coordinateMapper);
                            draw(original, joints[JointType_SpineBase], joints[JointType_HipLeft], coordinateMapper);
                            draw(original, joints[JointType_SpineBase], joints[JointType_HipRight], coordinateMapper);
                            draw(original, joints[JointType_ElbowRight], joints[JointType_WristRight], coordinateMapper);

                            draw(original, joints[JointType_WristLeft], joints[JointType_ThumbLeft], coordinateMapper);
                            draw(original, joints[JointType_WristLeft], joints[JointType_HandLeft], coordinateMapper);
                            draw(original, joints[JointType_HipLeft], joints[JointType_KneeLeft], coordinateMapper);
                            draw(original, joints[JointType_HipRight], joints[JointType_KneeRight], coordinateMapper);
                            draw(original, joints[JointType_WristRight], joints[JointType_ThumbRight], coordinateMapper);
                            draw(original, joints[JointType_WristRight], joints[JointType_HandRight], coordinateMapper);

                            draw(original, joints[JointType_HandLeft], joints[JointType_HandTipLeft], coordinateMapper);
                            draw(original, joints[JointType_KneeLeft], joints[JointType_FootLeft], coordinateMapper);
                            draw(original, joints[JointType_KneeRight], joints[JointType_FootRight], coordinateMapper);
                            draw(original, joints[JointType_HandRight], joints[JointType_HandTipRight], coordinateMapper);

                            for (int k = 0; k < JointType_Count; k++)
                            {
                                // Map the camera-space joint into DEPTH space (not
                                // color space) so it lines up with the
                                // depth-resolution skeleton canvas.
                                DepthSpacePoint t_point;
                                coordinateMapper->MapCameraPointToDepthSpace(joints[k].Position, &t_point);
                                Point point;
                                point.x = t_point.X;
                                point.y = t_point.Y;
                                cout << k << "point.x = " << point.x << "   " <<
                                    "point.y = " << point.y << "   " <<
                                    "point.z = " << joints[k].Position.Z << endl;
                                circle(original, point, 5, Scalar(0, 255, 0), -1, 8);

                                if (!handpoint.is_open())
                                {
                                    continue;
                                }

                                // Log the six hand-related joints, using the
                                // JointType enum names instead of the original
                                // magic numbers 7/11/21/22/23/24, and bounded
                                // snprintf instead of sprintf.
                                if (k == JointType_HandLeft)
                                {
                                    handpoint << "handleft.x = " << point.x << "   " <<
                                        "handleft.y = " << point.y << "   " <<
                                        "handleft.z = " << joints[k].Position.Z << endl;
                                    snprintf(handleft, sizeof(handleft), "x:%d,y:%d", (int)point.x, (int)point.y);
                                    putText(original, handleft, point, FONT_HERSHEY_PLAIN, 1, Scalar(255, 255, 255), 2, 8);
                                }
                                else if (k == JointType_HandRight)
                                {
                                    handpoint << "handright.x = " << point.x << "   " <<
                                        "handright.y = " << point.y << "   " <<
                                        "handright.z = " << joints[k].Position.Z << endl;
                                    snprintf(handright, sizeof(handright), "x:%d,y:%d", (int)point.x, (int)point.y);
                                    //putText(original, handright, point, FONT_HERSHEY_PLAIN, 1, Scalar(255, 255, 255), 2, 8);
                                }
                                else if (k == JointType_HandTipLeft)
                                {
                                    handpoint << "HandTipLeft.x = " << point.x << "   " <<
                                        "HandTipLeft.y = " << point.y << "   " <<
                                        "HandTipLeft.z = " << joints[k].Position.Z << endl;
                                    snprintf(HandTipLeft, sizeof(HandTipLeft), "x:%d,y:%d", (int)point.x, (int)point.y);
                                    //putText(original, HandTipLeft, point, FONT_HERSHEY_PLAIN, 1, Scalar(255, 255, 255), 2, 8);
                                }
                                else if (k == JointType_ThumbLeft)
                                {
                                    handpoint << "ThumbLeft.x = " << point.x << "   " <<
                                        "ThumbLeft.y = " << point.y << "   " <<
                                        "ThumbLeft.z = " << joints[k].Position.Z << endl;
                                    snprintf(ThumbLeft, sizeof(ThumbLeft), "x:%d,y:%d", (int)point.x, (int)point.y);
                                    //putText(original, ThumbLeft, point, FONT_HERSHEY_PLAIN, 1, Scalar(255, 255, 255), 2, 8);
                                }
                                else if (k == JointType_HandTipRight)
                                {
                                    handpoint << "HandTipRight.x = " << point.x << "   " <<
                                        "HandTipRight.y = " << point.y << "   " <<
                                        "HandTipRight.z = " << joints[k].Position.Z << endl;
                                    snprintf(HandTipRight, sizeof(HandTipRight), "x:%d,y:%d", (int)point.x, (int)point.y);
                                    //putText(original, HandTipRight, point, FONT_HERSHEY_PLAIN, 1, Scalar(255, 255, 255), 2, 8);
                                }
                                else if (k == JointType_ThumbRight)
                                {
                                    handpoint << "ThumbRight.x = " << point.x << "   " <<
                                        "ThumbRight.y = " << point.y << "   " <<
                                        "ThumbRight.z = " << joints[k].Position.Z << endl;
                                    snprintf(ThumbRight, sizeof(ThumbRight), "x:%d,y:%d", (int)point.x, (int)point.y);
                                    // Reconstructed: this comment line was garbled in
                                    // the published listing ("Sca / 9bdb / lar(...)").
                                    //putText(original, ThumbRight, point, FONT_HERSHEY_PLAIN, 1, Scalar(255, 255, 255), 2, 8);
                                }
                            }
                        }
                    }
                }
            }

            // Release each refreshed IBody before dropping the array — the
            // original leaked all of them on every frame.
            for (int i = 0; i < bodycount; i++)
            {
                if (bodyArr[i])
                {
                    bodyArr[i]->Release();
                }
            }
            delete[] bodyArr;
            bodyFrame->Release();

            imshow("body", original);
            snprintf(file_name, sizeof(file_name), "body + %d.jpg", a++);
            imwrite(file_name, original);
        }

        // ESC exits the loop so the cleanup below is actually reachable; the
        // original spun in while(1) forever and leaked everything on exit.
        if (waitKey(10) == 27)
        {
            break;
        }
    }

    handpoint.close();
    delete[] depthData;

    // Release all COM interfaces and close the sensor (original leaked these).
    if (colorReader)       colorReader->Release();
    if (depthReader)       depthReader->Release();
    if (bodyReader)        bodyReader->Release();
    if (colorSource)       colorSource->Release();
    if (depthSource)       depthSource->Release();
    if (bodySource)        bodySource->Release();
    if (coordinateMapper)  coordinateMapper->Release();
    if (kinectsensor)
    {
        kinectsensor->Close();
        kinectsensor->Release();
    }
    return 0;
}

void    draw(Mat & img, Joint & r_1, Joint & r_2, ICoordinateMapper * coordinateMapper)

{
//用两个关节点来做线段的两端,并且进行状态过滤
if (r_1.TrackingState == TrackingState_Tracked && r_2.TrackingState == TrackingState_Tracked)
{
DepthSpacePoint t_point;    //要把关节点用的摄像机坐标下的点转换成深度空间的点
Point   p_1, p_2;
coordinateMapper->MapCameraPointToDepthSpace(r_1.Position, &t_point);
p_1.x = t_point.X;
p_1.y = t_point.Y;
coordinateMapper->MapCameraPointToDepthSpace(r_2.Position, &t_point);
p_2.x = t_point.X;
p_2.y = t_point.Y;

line(img, p_1, p_2, Vec3b(255, 255, 0), 3);
//circle(img, p_1, 5, Vec3b(255, 0, 0), -1);
//circle(img, p_2, 5, Vec3b(255, 0, 0), -1);
}

}

内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: