#include <OpenNI.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <cstring>
#include <iostream>

// OBCameraParams and OBEXTENSION_ID_CAM_PARAMS come from the Orbbec OpenNI2
// SDK headers (the exact header name depends on the SDK version).

void SaveCameraParams();

openni::Device mDevice;
openni::VideoStream mColorStream;
openni::VideoStream mDepthStream;

// Depth/IR pinhole intrinsics: principal point (c_x, c_y) and focal lengths (f_x, f_y), in pixels.
struct ObCameraParams
{
    ObCameraParams();
    ObCameraParams(float c_x_, float c_y_, float f_x_, float f_y_);
    float c_x;
    float c_y;
    float f_x;
    float f_y;
};

ObCameraParams mIrParam;

// Defaults assume a 640x480 stream; they are used if the device query in
// SaveCameraParams() fails.
ObCameraParams::ObCameraParams()
    : c_x(320.0f), c_y(240.0f), f_x(480.0f), f_y(480.0f)
{
}

ObCameraParams::ObCameraParams(float c_x_, float c_y_, float f_x_, float f_y_)
    : c_x(c_x_), c_y(c_y_), f_x(f_x_), f_y(f_y_)
{
}

bool init()
{
    if (openni::OpenNI::initialize() != openni::STATUS_OK)
    {
        std::cerr << "OpenNI initialization error: " << openni::OpenNI::getExtendedError() << std::endl;
        return false;
    }
    if (mDevice.open(openni::ANY_DEVICE) != openni::STATUS_OK)
    {
        std::cerr << "Can't open device: " << openni::OpenNI::getExtendedError() << std::endl;
        return false;
    }
    return true;
}

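// Both streams are configured for 640x480 @ 30 fps so that their pixel indices
// line up one-to-one with the 640x480 point cloud allocated in main().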
bool createColorStream()
{
    if (mDevice.hasSensor(openni::SENSOR_COLOR))
    {
        if (mColorStream.create(mDevice, openni::SENSOR_COLOR) == openni::STATUS_OK)
        {
            openni::VideoMode mMode;
            mMode.setResolution(640, 480);
            mMode.setFps(30);
            mMode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);
            if (mColorStream.setVideoMode(mMode) != openni::STATUS_OK)
            {
                std::cerr << "Can't apply VideoMode to color stream: " << openni::OpenNI::getExtendedError() << std::endl;
                return false;
            }
        }
        else
        {
            std::cerr << "Can't create color stream on device: " << openni::OpenNI::getExtendedError() << std::endl;
            return false;
        }
        mColorStream.start();
        return true;
    }
    std::cerr << "ERROR: This device does not have a color sensor" << std::endl;
    return false;
}

bool createDepthStream()
{
    if (mDevice.hasSensor(openni::SENSOR_DEPTH))
    {
        if (mDepthStream.create(mDevice, openni::SENSOR_DEPTH) == openni::STATUS_OK)
        {
            openni::VideoMode mMode;
            mMode.setResolution(640, 480);
            mMode.setFps(30);
            mMode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM);
            if (mDepthStream.setVideoMode(mMode) != openni::STATUS_OK)
            {
                std::cerr << "Can't apply VideoMode to depth stream: " << openni::OpenNI::getExtendedError() << std::endl;
                return false;
            }
        }
        else
        {
            std::cerr << "Can't create depth stream on device: " << openni::OpenNI::getExtendedError() << std::endl;
            return false;
        }
        mDepthStream.start();
        // Align depth pixels to the color image so depth and RGB indices match.
        if (mDevice.isImageRegistrationModeSupported(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR))
            mDevice.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
        else
            std::cerr << "Depth-to-color registration is not supported" << std::endl;
        SaveCameraParams();
        return true;
    }
    std::cerr << "ERROR: This device does not have a depth sensor" << std::endl;
    return false;
}

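// SaveCameraParams() reads the factory-calibrated intrinsics through the
// Orbbec-specific device property OBEXTENSION_ID_CAM_PARAMS; if the query
// fails, the 640x480 defaults set in ObCameraParams() are kept.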
void SaveCameraParams()
{
    OBCameraParams cameraParam;
    int dataSize = sizeof(cameraParam);
    memset(&cameraParam, 0, sizeof(cameraParam));
    openni::Status rc = mDevice.getProperty(OBEXTENSION_ID_CAM_PARAMS, (uint8_t*)&cameraParam, &dataSize);
    if (rc != openni::STATUS_OK)
        return;
    // l_intr_p holds the left (IR/depth) camera intrinsics as [fx, fy, cx, cy].
    mIrParam.f_x = cameraParam.l_intr_p[0];
    mIrParam.f_y = cameraParam.l_intr_p[1];
    mIrParam.c_x = cameraParam.l_intr_p[2];
    mIrParam.c_y = cameraParam.l_intr_p[3];
}

// Scale the stored 640x480 intrinsics to the actual frame resolution
// (floating-point division so non-640x480 modes scale correctly).
void LoadCameraParams(float& fdx, float& fdy, float& u0, float& v0, int width, int height)
{
    fdx = mIrParam.f_x * (width / 640.0f);
    fdy = mIrParam.f_y * (height / 480.0f);
    u0 = mIrParam.c_x * (width / 640.0f);
    v0 = mIrParam.c_y * (height / 480.0f);
}

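// SaveFrame() back-projects each depth pixel with the standard pinhole model:
//   Z = depth * 0.001 (mm -> m),  X = (u - c_x) / f_x * Z,  Y = (v - c_y) / f_y * Z,
// negating X so the cloud is mirrored for display.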
void SaveFrame(const openni::VideoFrameRef& frame, openni::RGB888Pixel* pColor, pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_XYZRGB)
{
    if (!frame.isValid())
        return;
    int nRows = frame.getHeight();
    int nCols = frame.getWidth();
    const openni::DepthPixel* pDepth = (const openni::DepthPixel*)frame.getData();
    if (!pDepth)
        return;
    float tx, ty, fdx, fdy, u0, v0, x, y, z, scale = 0.001f;
    LoadCameraParams(fdx, fdy, u0, v0, nCols, nRows);
    int nIdx = 0;
    for (int v = 0; v < nRows; ++v)
    {
        for (int u = 0; u < nCols; ++u)
        {
            uint16_t depth = pDepth[v * nCols + u];
            tx = (u - u0) / fdx;
            ty = (v - v0) / fdy;
            x = -depth * tx * scale;
            y = depth * ty * scale;
            z = depth * scale;
            cloud_XYZRGB->points[nIdx].x = x;
            cloud_XYZRGB->points[nIdx].y = y;
            cloud_XYZRGB->points[nIdx].z = z;
            // Depth-to-color registration makes the color frame line up with the
            // depth frame, so the same linear index is used for both.
            cloud_XYZRGB->points[nIdx].r = pColor[nIdx].r;
            cloud_XYZRGB->points[nIdx].g = pColor[nIdx].g;
            cloud_XYZRGB->points[nIdx].b = pColor[nIdx].b;
            ++nIdx;
        }
    }
}

// Grab one color frame and one depth frame and fill the point cloud.
void GetFrame(pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_XYZRGB)
{
    openni::VideoFrameRef colorFrame;
    if (mColorStream.readFrame(&colorFrame) != openni::STATUS_OK || !colorFrame.isValid())
        return;
    openni::RGB888Pixel* pColor = (openni::RGB888Pixel*)colorFrame.getData();
    openni::VideoFrameRef mDepthFrame;
    if (mDepthStream.readFrame(&mDepthFrame) == openni::STATUS_OK)
    {
        SaveFrame(mDepthFrame, pColor, cloud_XYZRGB);
    }
}

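// getCloudXYZCoordinate() is an alternative to GetFrame()/SaveFrame() that lets
// OpenNI do the back-projection via CoordinateConverter::convertDepthToWorld().
// It is kept for reference and is not called from main().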
bool getCloudXYZCoordinate(pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_XYZRGB)
{
    openni::VideoFrameRef colorFrame;
    if (mColorStream.readFrame(&colorFrame) != openni::STATUS_OK || !colorFrame.isValid())
        return false;
    openni::RGB888Pixel* pColor = (openni::RGB888Pixel*)colorFrame.getData();
    openni::VideoFrameRef mDepthFrame;
    if (mDepthStream.readFrame(&mDepthFrame) == openni::STATUS_OK)
    {
        float fx, fy, fz;
        int i = 0;
        const float fScale = 0.001f; // mm -> m
        const openni::DepthPixel* pDepthArray = (const openni::DepthPixel*)mDepthFrame.getData();
        for (int y = 0; y < mDepthFrame.getHeight(); y++)
        {
            for (int x = 0; x < mDepthFrame.getWidth(); x++)
            {
                int idx = x + y * mDepthFrame.getWidth();
                const openni::DepthPixel rDepth = pDepthArray[idx];
                openni::CoordinateConverter::convertDepthToWorld(mDepthStream, x, y, rDepth, &fx, &fy, &fz);
                // Negate x and y so the orientation matches SaveFrame().
                fx = -fx;
                fy = -fy;
                cloud_XYZRGB->points[i].x = fx * fScale;
                cloud_XYZRGB->points[i].y = fy * fScale;
                cloud_XYZRGB->points[i].z = fz * fScale;
                cloud_XYZRGB->points[i].r = pColor[i].r;
                cloud_XYZRGB->points[i].g = pColor[i].g;
                cloud_XYZRGB->points[i].b = pColor[i].b;
                i++;
            }
        }
        return true;
    }
    std::cerr << "getCloudXYZCoordinate: failed to read frame from depth stream" << std::endl;
    return false;
}

int main()
{
    if (!init())
    {
        std::cerr << "Failed to initialize OpenNI ..." << std::endl;
        return -1;
    }
    if (createColorStream() && createDepthStream())
    {
        std::cout << "displayPointCloud: created color stream and depth stream ..." << std::endl;
    }
    else
    {
        std::cerr << "displayPointCloud: cannot create color stream and depth stream ..." << std::endl;
        return -1;
    }
    // Pre-allocate an organized 640x480 cloud matching the stream resolution.
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_XYZRGB(new pcl::PointCloud<pcl::PointXYZRGB>());
    cloud_XYZRGB->width = 640;
    cloud_XYZRGB->height = 480;
    cloud_XYZRGB->points.resize(static_cast<size_t>(cloud_XYZRGB->width) * static_cast<size_t>(cloud_XYZRGB->height));
    pcl::visualization::PCLVisualizer::Ptr m_pViewer(new pcl::visualization::PCLVisualizer("Viewer"));
    m_pViewer->setCameraPosition(0, 0, -2, 0, -1, 0, 0);
    m_pViewer->addCoordinateSystem(0.3);
    while (!m_pViewer->wasStopped())
    {
        // The cloud is re-added each iteration and removed again after rendering.
        GetFrame(cloud_XYZRGB);
        pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(cloud_XYZRGB);
        m_pViewer->addPointCloud<pcl::PointXYZRGB>(cloud_XYZRGB, rgb, "cloud");
        m_pViewer->spinOnce();
        m_pViewer->removeAllPointClouds();
    }
    mColorStream.destroy();
    mDepthStream.destroy();
    mDevice.close();
    openni::OpenNI::shutdown();
    return 0;
}
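
// Runtime note: OpenNI2 typically loads its camera drivers from an "OpenNI2/Drivers"
// directory next to the executable (or found via the library search path), so the
// driver folder from the SDK redistributable needs to be deployed with this program.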