Kinect depth-to-color image alignment program
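The program below grabs color and depth frames from a Kinect v2 through a multi-source frame reader, fills the holes in the raw depth map with a neighborhood mode filter (depthFilter), and then uses ICoordinateMapper::MapColorFrameToDepthSpace to look up, for every color pixel, the depth value that projects onto it. The color stream is cropped to a 1520x1080 ROI, and both the cropped color image and the aligned depth image are scaled to 40% and written to disk as numbered PNG pairs.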
//#include "duiqi.hpp"
#include "kinect.h"
#include <iostream>
#include "opencv2/opencv.hpp"
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <fstream>
using namespace cv;
using namespace std;
Mat depthFilter(UINT16 *depthData);
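// Helper: release a COM interface pointer and reset it to NULL.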
template<class Interface>
inline void SafeRelease(Interface *& pInterfaceToRelease)
{
if (pInterfaceToRelease != NULL)
{
pInterfaceToRelease->Release();
pInterfaceToRelease = NULL;
}
}
UINT16 uDepthMin = 0, uDepthMax = 0;
int main()
{
IKinectSensor* m_pKinectSensor = nullptr;
HRESULT hr;
ICoordinateMapper* m_pCoordinateMapper = NULL;
CameraIntrinsics* m_pCameraIntrinsics = new CameraIntrinsics();
hr = GetDefaultKinectSensor(&m_pKinectSensor);
if (FAILED(hr))
{
return hr;
}
IMultiSourceFrameReader* m_pMultiFrameReader = NULL;
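// Open a multi-source frame reader so color, infrared and depth frames are delivered together.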
if (m_pKinectSensor)
{
hr = m_pKinectSensor->Open();
if (SUCCEEDED(hr))
{
hr = m_pKinectSensor->OpenMultiSourceFrameReader(
FrameSourceTypes::FrameSourceTypes_Color |
FrameSourceTypes::FrameSourceTypes_Infrared |
FrameSourceTypes::FrameSourceTypes_Depth,
&m_pMultiFrameReader);
}
}
if (SUCCEEDED(hr))
{
hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
}
if (!m_pKinectSensor || FAILED(hr))
{
return E_FAIL;
}
IMultiSourceFrame* m_pMultiFrame = nullptr;
IDepthFrameReference* m_pDepthFrameReference = NULL;
IColorFrameReference* m_pColorFrameReference = NULL;
IInfraredFrameReference* m_pInfraredFrameReference = NULL;
IInfraredFrame* m_pInfraredFrame = NULL;
IDepthFrame* m_pDepthFrame = NULL;
IColorFrame* m_pColorFrame = NULL;
Mat rgb(1080, 1920, CV_8UC4);
Mat rgb_resize(540, 960, CV_8UC4);
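// Note: cv::resize() below reallocates its output, so rgb_resize actually ends up 608x432 (0.4x the cropped 1520x1080 ROI); the size given here is only a placeholder.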
DepthSpacePoint* m_pDepthCoordinates = NULL;
ColorSpacePoint* m_pColorCoordinates = NULL;
CameraSpacePoint* m_pCameraCoordinates = NULL;
m_pColorCoordinates = new ColorSpacePoint[512 * 424];
m_pCameraCoordinates = new CameraSpacePoint[512 * 424];
UINT16 *depthData = new UINT16[424 * 512];
Mat depth(424, 512, CV_16UC1);
Mat depth8U(424, 512, CV_8U);
vector<DepthSpacePoint> depthSpacePoints(1920 * 1080);
Mat CoordinateMapperMat(1080, 1520, CV_8U);
Mat CoordinateMapperMat_resize(540, 960, CV_8U);
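// CoordinateMapperMat holds the filtered depth re-projected onto the cropped 1520x1080 color ROI (one 8-bit depth value per color pixel), so the saved color and aligned-depth images share the same pixel grid.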
int savecount = 0;
while (true)
{
hr = m_pMultiFrameReader->AcquireLatestFrame(&m_pMultiFrame);
if (FAILED(hr) || !m_pMultiFrame)
{
continue;
}
if (SUCCEEDED(hr))
hr = m_pMultiFrame->get_ColorFrameReference(&m_pColorFrameReference);
if (SUCCEEDED(hr))
hr = m_pColorFrameReference->AcquireFrame(&m_pColorFrame);
if (SUCCEEDED(hr))
hr = m_pMultiFrame->get_DepthFrameReference(&m_pDepthFrameReference);
if (SUCCEEDED(hr))
hr = m_pDepthFrameReference->AcquireFrame(&m_pDepthFrame);
/*m_pDepthFrame->get_DepthMinReliableDistance(&uDepthMin);
m_pDepthFrame->get_DepthMaxReliableDistance(&uDepthMax);
cout << "Reliable Distance: " << uDepthMin << " - " << uDepthMax << endl;*/
savecount++;
cout << savecount << endl;
ostringstream savecountstr;
savecountstr << savecount;
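// --- color frame: copy as BGRA, crop 200 px from each side, downscale to 40% and save ---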
UINT nColorBufferSize = 1920 * 1080 * 4;
if (SUCCEEDED(hr))
{
hr = m_pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(rgb.data), ColorImageFormat::ColorImageFormat_Bgra);
Rect rect(200, 0, 1520, 1080);
Mat rgb_roi = rgb(rect);
resize(rgb_roi, rgb_resize, Size(), 0.4, 0.4);
imshow("color_resize", rgb_resize);
imwrite("D:/file/hust/ARcodes/ARKinect∂‘∆Î181107/ARKinect/save6/color/" + savecountstr.str() + ".png", rgb_resize);
}
UINT nDepthBufferSize = 424 * 512;
if (SUCCEEDED(hr))
{
hr = m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depthData));
m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depth.data));
// convert the 16-bit depth to 8-bit for display (255 corresponds to 4.5 m)
depth.convertTo(depth8U, CV_8U, 255.0f / 4500.0f);
imshow("depth", depth8U);
}
// run the hole-filling filter on the raw 16-bit depth data
Mat filterDepth = depthFilter(depthData);
Mat filterDepth8U;
filterDepth.convertTo(filterDepth8U, CV_8U, 255.0f / 4500.0f);
if (SUCCEEDED(hr))
{
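// For each of the 1920x1080 color pixels, MapColorFrameToDepthSpace returns the matching (x, y) position in the 512x424 depth image; color pixels with no depth sample come back as -infinity.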
hr = m_pCoordinateMapper->MapColorFrameToDepthSpace(424 * 512, reinterpret_cast<UINT16*>(filterDepth.data), 1920 * 1080, &depthSpacePoints[0]);
}
if (SUCCEEDED(hr))
{
CoordinateMapperMat = Scalar(0, 0, 0, 0); // reset the aligned-depth image (defined above as Mat(1080, 1520, CV_8U)) for this frame
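// Walk over the cropped color ROI (x = 200..1719); for each color pixel, fetch the depth-space coordinate it maps to and copy the corresponding filtered depth value. Pixels with no valid mapping keep the value 0.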
for (int y = 0; y < 1080; y++)
{
for (int x = 200; x < 1720; x++)
//for (int x = 0; x < 1920; x++)
{
unsigned int index = y * 1920 + x;
DepthSpacePoint p = depthSpacePoints[index];
if (p.X != -std::numeric_limits<float>::infinity() && p.Y != -std::numeric_limits<float>::infinity())
{
int depthX = static_cast<int>(p.X + 0.5f); // +0.5f rounds to the nearest depth pixel
int depthY = static_cast<int>(p.Y + 0.5f);
if ((depthX >= 0) && (depthX < 512) && (depthY >= 0) && (depthY < 424))
{
CoordinateMapperMat.at<uchar>(y, x - 200) = filterDepth8U.at<uchar>(depthY, depthX);
}
}
}
}
resize(CoordinateMapperMat, CoordinateMapperMat_resize, Size(), 0.4, 0.4);
imshow("CoordinateMapper", CoordinateMapperMat_resize);
imwrite("D:/file/hust/ARcodes/ARKinect∂‘∆Î181107/ARKinect/save6/result/" + savecountstr.str() + ".png", CoordinateMapperMat_resize);
}
int c = waitKey(1);
if ((char)c == VK_ESCAPE)
break;
SafeRelease(m_pColorFrame);
SafeRelease(m_pDepthFrame);
SafeRelease(m_pColorFrameReference);
SafeRelease(m_pDepthFrameReference);
SafeRelease(m_pMultiFrame);
}
cv::destroyAllWindows();
SafeRelease(m_pCoordinateMapper);
SafeRelease(m_pMultiFrameReader);
m_pKinectSensor->Close();
SafeRelease(m_pKinectSensor);
delete[] m_pColorCoordinates; // allocated above but never used
delete[] m_pCameraCoordinates; // allocated above but never used
delete[] depthData;
std::system("pause");
return 0;
}
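// depthFilter: fill holes (zero-valued pixels) in the raw 512x424 depth map.
// For every invalid pixel it scans the surrounding 5x5 neighborhood, counts the valid
// neighbors in the inner 3x3 ring and in the outer ring, and if enough are found it
// replaces the hole with the most frequent (mode) neighboring depth value.
// i_before / i_after are BGRA debug visualizations (holes drawn in blue); they are
// built here but never displayed or returned.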
Mat depthFilter(UINT16 *depthData) {
Mat i_before(424, 512, CV_8UC4);
Mat i_after(424, 512, CV_8UC4);
Mat i_result(424, 512, CV_16UC1);
cv::Mat i_result8U;
unsigned short maxDepth = 0;
unsigned short iZeroCountBefore = 0;
unsigned short iZeroCountAfter = 0;
unsigned short* depthArray = (unsigned short*)depthData;
for (int i = 0; i < 512 * 424; i++)
{
int row = i / 512;
int col = i % 512;
unsigned short depthValue = depthArray[row * 512 + col];
if (depthValue == 0)
{
i_before.data[i * 4] = 255;
i_before.data[i * 4 + 1] = 0;
i_before.data[i * 4 + 2] = 0;
i_before.data[i * 4 + 3] = depthValue / 256;
iZeroCountBefore++;
}
else
{
i_before.data[i * 4] = depthValue / 4500.0f * 256;
i_before.data[i * 4 + 1] = depthValue / 4500.0f * 256;
i_before.data[i * 4 + 2] = depthValue / 4500.0f * 256;
i_before.data[i * 4 + 3] = depthValue / 4500.0f * 256;
}
maxDepth = depthValue > maxDepth ? depthValue : maxDepth;
}
//cout << "max depth value: " << maxDepth << endl;
unsigned short* smoothDepthArray = (unsigned short*)i_result.data;
int widthBound = 512 - 1;
int heightBound = 424 - 1;
int innerBandThreshold = 1;
int outerBandThreshold = 3;
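// A hole is filled only if at least innerBandThreshold valid pixels lie in the inner 3x3 ring or at least outerBandThreshold valid pixels lie in the outer ring of the 5x5 window.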
for (int depthArrayRowIndex = 0; depthArrayRowIndex<424; depthArrayRowIndex++)
{
for (int depthArrayColumnIndex = 0; depthArrayColumnIndex < 512; depthArrayColumnIndex++)
{
int depthIndex = depthArrayColumnIndex + (depthArrayRowIndex * 512);
if (depthArray[depthIndex] == 0)
{
int x = depthIndex % 512;
int y = (depthIndex - x) / 512;
unsigned short filterCollection[24][2] = { 0 };
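// filterCollection is a small histogram: up to 24 (depth value, count) pairs collected from the non-zero neighbors.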
int innerBandCount = 0;
int outerBandCount = 0;
for (int yi = -2; yi < 3; yi++)
{
for (int xi = -2; xi < 3; xi++)
{
if (xi != 0 || yi != 0)
{
int xSearch = x + xi;
int ySearch = y + yi;
if (xSearch >= 0 && xSearch <= widthBound &&
ySearch >= 0 && ySearch <= heightBound)
{
int index = xSearch + (ySearch * 512);
if (depthArray[index] != 0)
{
for (int i = 0; i < 24; i++)
{
if (filterCollection[i][0] == depthArray[index])
{
filterCollection[i][1]++;
break;
}
else if (filterCollection[i][0] == 0)
{
filterCollection[i][0] = depthArray[index];
filterCollection[i][1]++;
break;
}
}
if (yi != 2 && yi != -2 && xi != 2 && xi != -2)
innerBandCount++;
else
outerBandCount++;
}
}
}
}
}
if (innerBandCount >= innerBandThreshold || outerBandCount >= outerBandThreshold)
{
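// Enough valid neighbors were found: fill the hole with the most frequent neighboring depth value.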
short frequency = 0;
short depth = 0;
for (int i = 0; i < 24; i++)
{
if (filterCollection[i][0] == 0)
break;
if (filterCollection[i][1] > frequency)
{
depth = filterCollection[i][0];
frequency = filterCollection[i][1];
}
}
smoothDepthArray[depthIndex] = depth;
}
else
{
smoothDepthArray[depthIndex] = 0;
}
}
else
{
smoothDepthArray[depthIndex] = depthArray[depthIndex];
}
}
}
for (int i = 0; i < 512 * 424; i++)
{
int row = i / 512;
int col = i % 512;
unsigned short depthValue = smoothDepthArray[row * 512 + col];
if (depthValue == 0)
{
i_after.data[i * 4] = 255;
i_after.data[i * 4 + 1] = 0;
i_after.data[i * 4 + 2] = 0;
i_after.data[i * 4 + 3] = depthValue / 256;
iZeroCountAfter++;
}
else
{
i_after.data[i * 4] = depthValue / 4500.0f * 256;
i_after.data[i * 4 + 1] = depthValue / 4500.0f * 256;
i_after.data[i * 4 + 2] = depthValue / 4500.0f * 256;
i_after.data[i * 4 + 3] = depthValue / 4500.0f * 256;
}
}
i_result.convertTo(i_result8U, CV_8U, 255.0f / 4500.0f); // 8-bit preview of the filtered depth (not used further)
return i_result; // return the filtered 16-bit depth map for coordinate mapping
}
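As a small usage sketch (the folder names and the frame index below are assumptions that simply mirror the imwrite() calls above), a saved color / aligned-depth pair can be loaded back with OpenCV for offline processing. Because both images were written at the same 40% scale of the 1520x1080 ROI, they share one pixel grid: depth(y, x) is the 8-bit depth (255 ≈ 4.5 m) behind color(y, x).

#include <opencv2/opencv.hpp>
#include <iostream>
#include <string>

int main()
{
    // Assumed paths: the same save6/color and save6/result folders the alignment program writes to.
    const std::string base = "D:/file/hust/ARcodes/ARKinect对齐181107/ARKinect/save6/";
    const std::string idx = "1"; // frame counter used as the file name
    cv::Mat color = cv::imread(base + "color/" + idx + ".png", cv::IMREAD_UNCHANGED);
    cv::Mat depth = cv::imread(base + "result/" + idx + ".png", cv::IMREAD_GRAYSCALE);
    if (color.empty() || depth.empty())
    {
        std::cerr << "failed to load frame " << idx << std::endl;
        return 1;
    }
    // Convert the 8-bit aligned depth back to an approximate range in millimetres (255 ~ 4500 mm).
    cv::Mat depth_mm;
    depth.convertTo(depth_mm, CV_32F, 4500.0 / 255.0);
    std::cout << "color " << color.cols << "x" << color.rows
              << ", depth " << depth.cols << "x" << depth.rows << std::endl;
    return 0;
}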