Obtaining depth and color data from the Kinect with OpenCV in C++

  • 2020-05-07 20:04:57
  • OfStack

Development environment: VS2010 + OpenCV 2.4.10

First of all, download the latest Kinect for Windows SDK (the code below uses the v1.x NUI API): http://www.microsoft.com/en-us/kinectforwindows/develop/downloads-docs.aspx

Do not plug in the Kinect before installing, and preferably do not connect any USB devices other than the keyboard and mouse during installation. After installation completes, you can find two newly installed programs under "Start": one displays the Kinect depth map, and the other lists the various sample programs in the SDK.

In the SDK installation directory you will find a samples folder with samples in four languages: "native" is C++ and "managed" is C#; I am not familiar with the other two. Since I know C++, I went with the C++ samples.
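Once the SDK is installed, a quick check like the one below can confirm that the NUI API sees the sensor. This is my own minimal sketch, not one of the SDK samples; it assumes only NuiApi.h and the installed runtime:

#include <Windows.h>
#include <iostream>
#include "NuiApi.h"

int main()
{
  int sensorCount = 0;
  // Ask the NUI runtime how many Kinect sensors are attached
  HRESULT hr = NuiGetSensorCount(&sensorCount);
  if (FAILED(hr) || sensorCount == 0)
  {
    std::cout << "No Kinect sensor detected" << std::endl;
    return -1;
  }
  std::cout << sensorCount << " Kinect sensor(s) detected" << std::endl;
  return 0;
}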

opencv+kinect.cpp


#include <opencv2/opencv.hpp>
#include <iostream>
// Windows header; required, otherwise NuiApi.h will not compile
#include <Windows.h>
// Kinect for Windows header
#include "NuiApi.h"
 
using namespace std;
using namespace cv;
 
#include <d3d11.h>
 
 
// Farthest distance (mm)
const int MAX_DISTANCE = 3500;
// Nearest distance (mm)
const int MIN_DISTANCE = 200;
 
const LONG m_depthWidth = 640;
const LONG m_depthHeight = 480;
const LONG m_colorWidth = 640;
const LONG m_colorHeight = 480;
const LONG cBytesPerPixel = 4;
 
int main()
{
  // Color image
  Mat image_rgb;
  // Depth image
  Mat image_depth;

  // Create the Mats: 480x640, 3-channel color and 1-channel depth
  image_rgb.create(480,640,CV_8UC3);
  image_depth.create(480,640,CV_8UC1);

  // Pointer to the Kinect sensor instance
  INuiSensor* m_pNuiSensor = NULL;
 
  // Number of Kinects currently connected (in case there is more than one)
  int iSensorCount = 0;
  // Query how many Kinects are attached
  HRESULT hr = NuiGetSensorCount(&iSensorCount);
  if (FAILED(hr) || iSensorCount == 0)
  {
    cout<<"No Kinect sensor found"<<endl;
    return -1;
  }

  // Create the sensor instance by index; with only one Kinect attached, no loop is needed
  hr = NuiCreateSensorByIndex(iSensorCount - 1, &m_pNuiSensor);
  // Initialize to receive color and depth data streams 
  hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH);
 
  // Abort on failure
  if (FAILED(hr))
  {
    cout<<"NuiInitialize failed"<<endl;
    return hr;
  }
 
  // Event signaled when a new color frame is ready
  HANDLE nextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
  // Handle to the color stream
  HANDLE colorStreamHandle = NULL;
  // Event signaled when a new depth frame is ready
  HANDLE nextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
  // Handle to the depth stream
  HANDLE depthStreamHandle = NULL;
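
  // A note on NuiImageStreamOpen's parameters (per the SDK documentation):
  // image type, resolution, frame flags (0 = none), how many frames the
  // runtime may buffer (2 here), the event to signal when a frame arrives,
  // and the stream handle returned through the last argument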
 
  // Open the color data stream (NUI_IMAGE_TYPE_COLOR)
  hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0,2,nextColorFrameEvent,&colorStreamHandle);
 
  if( FAILED( hr ) )// Make sure the stream opened correctly
  {
    cout<<"Could not open color image stream video"<<endl;
    m_pNuiSensor->NuiShutdown();
    return hr;
  }
 
  // Open the depth data stream (NUI_IMAGE_TYPE_DEPTH)
  hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH, NUI_IMAGE_RESOLUTION_640x480, 0,2, nextDepthFrameEvent, &depthStreamHandle);
 
  if( FAILED( hr ) )// Make sure the stream opened correctly
  {
    cout<<"Could not open depth image stream video"<<endl;
    m_pNuiSensor->NuiShutdown();
    return hr;
  }
 
 
 
  cv::namedWindow("depth", CV_WINDOW_AUTOSIZE);
  moveWindow("depth",300,600);
  cv::namedWindow("colorImage",CV_WINDOW_AUTOSIZE);
  moveWindow("colorImage",0,200);
 
  while (1)
  {
    NUI_IMAGE_FRAME pImageFrame_rgb;
    NUI_IMAGE_FRAME pImageFrame_depth;
 
    // Poll for a new color frame; the zero timeout returns immediately,
    // and a return value of 0 (WAIT_OBJECT_0) means the event is signaled
    if (WaitForSingleObject(nextColorFrameEvent, 0) == 0)
    {
      // Fetch the frame from the color stream handle opened above; the data is returned in pImageFrame_rgb
      hr = m_pNuiSensor->NuiImageStreamGetNextFrame(colorStreamHandle, 0, &pImageFrame_rgb);
      if (FAILED(hr))
      {
        cout<<"Could not get color image"<<endl;
        m_pNuiSensor->NuiShutdown();
        return -1;
      }
 
      INuiFrameTexture *pTexture = pImageFrame_rgb.pFrameTexture;
      NUI_LOCKED_RECT lockedRect;
 
      // LockRect fills lockedRect with the two fields we need: Pitch (bytes per
      // row) and pBits (address of the first byte). It also locks the buffer so
      // the Kinect cannot modify it while we read.
      pTexture->LockRect(0, &lockedRect, NULL, 0);
      // Verify that the data obtained is valid 
      if (lockedRect.Pitch != 0)
      {
        // Convert the buffer into an OpenCV Mat
        for (int i = 0; i < image_rgb.rows; i++)
        {
          // Pointer to row i
          uchar *prt = image_rgb.ptr(i);

          // Each channel is a single byte, so a plain uchar pointer works
          uchar *pBuffer = (uchar*)(lockedRect.pBits) + i * lockedRect.Pitch;

          for (int j = 0; j < image_rgb.cols; j++)
          {
            // The source is 4 bytes per pixel: bytes 0-2 are BGR, byte 3 is unused
            prt[3 * j] = pBuffer[4 * j];
            prt[3 * j + 1] = pBuffer[4 * j + 1];
            prt[3 * j + 2] = pBuffer[4 * j + 2];
          }
        }
 
        imshow("colorImage",image_rgb);
        // Unlock the buffer
        pTexture->UnlockRect(0);
        // Release the frame 
        m_pNuiSensor->NuiImageStreamReleaseFrame(colorStreamHandle, &pImageFrame_rgb );
      }
      else
      {
        cout<<"Buffer length of received texture is bogus\r\n"<<endl;
      }
 
      BOOL nearMode;
      INuiFrameTexture* pColorToDepthTexture; 
 
 
      // Depth image processing 
      if (WaitForSingleObject(nextDepthFrameEvent, INFINITE) == 0)
      {
 
        hr = m_pNuiSensor->NuiImageStreamGetNextFrame(depthStreamHandle, 0 , &pImageFrame_depth);
 
        if (FAILED(hr))
        {
          cout<<"Could not get depth image"<<endl;
          m_pNuiSensor->NuiShutdown();
          return -1;
        }
 
        hr = m_pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture( 
          depthStreamHandle, &pImageFrame_depth, &nearMode, &pColorToDepthTexture); 
        INuiFrameTexture *pTexture = pImageFrame_depth.pFrameTexture;
        NUI_LOCKED_RECT lockedRect;
        NUI_LOCKED_RECT ColorToDepthLockRect; 
 
        pTexture->LockRect(0, &lockedRect, NULL, 0);
        pColorToDepthTexture->LockRect(0,&ColorToDepthLockRect,NULL,0); 
 
        // Same row-by-row conversion as for the color image
        for (int i = 0; i < image_depth.rows; i++)
        {
          uchar *prt = image_depth.ptr<uchar>(i);
 
          uchar* pBuffer = (uchar*)(lockedRect.pBits) + i * lockedRect.Pitch;
          // Each depth value is 2 bytes, so cast the BYTE pointer to USHORT
          USHORT *pBufferRun = (USHORT*)pBuffer;
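          // Editor's note on the format (SDK 1.x): each 16-bit depth pixel packs
          // the player index into its low 3 bits and the distance in millimeters
          // into bits 3-15, so the ">> 3" below drops the player-index bits; the
          // SDK helper NuiDepthPixelToDepth(pixel) does the same unpacking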
 
          for (int j = 0; j < image_depth.cols; j++)
          {
            // Map depths between MIN_DISTANCE and MAX_DISTANCE (mm) onto [0, 255];
            // anything outside that range is clamped to the edge values
            if ((pBufferRun[j] >> 3) > MAX_DISTANCE) prt[j] = 255;
            else if((pBufferRun[j] >> 3) < MIN_DISTANCE) prt[j] = 0;
            else prt[j] = (BYTE)(255 * (pBufferRun[j] >> 3) / MAX_DISTANCE);
          }
        }
        imshow("depth", image_depth);
 
 
 
        // Next comes the alignment section, which pulls out the foreground 
 
        // Array to receive the mapped depth points, one per color pixel
        NUI_DEPTH_IMAGE_POINT* depthPoints = new NUI_DEPTH_IMAGE_POINT[640 * 480];
        if (ColorToDepthLockRect.Pitch != 0) 
        { 
          HRESULT hrState = S_OK; 
           
          // INuiCoordinateMapper converts between the different coordinate spaces (depth, color, skeleton)
          INuiCoordinateMapper* pMapper;

          // Get the coordinate mapper for this Kinect instance
          hrState = m_pNuiSensor->NuiGetCoordinateMapper(&pMapper);
 
          if (FAILED(hrState)) 
          { 
            return hrState; 
          } 
 
          // The key step: map the color frame into depth space. Parameters:
          // 1: color image type
          // 2: color image resolution
          // 3: depth image resolution
          // 4: number of depth pixels
          // 5: the depth pixel buffer (NUI_DEPTH_IMAGE_PIXEL*)
          // 6: number of output points
          // 7: output array receiving the mapped points
          hrState = pMapper->MapColorFrameToDepthFrame(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, NUI_IMAGE_RESOLUTION_640x480,
            640 * 480, (NUI_DEPTH_IMAGE_PIXEL*)ColorToDepthLockRect.pBits, 640 * 480, depthPoints);
 
          if (FAILED(hrState)) 
          { 
            return hrState; 
          } 
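
          // depthPoints now holds, for each color pixel in row-major order, the
          // matching depth-image coordinate (x, y) and its depth in millimeters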
 
 
          // Image used to display the aligned foreground
          Mat show;
          show.create(480,640,CV_8UC3);
          show = 0;
 
          for (int i = 0; i < image_rgb.rows; i++)
          {
            for (int j = 0; j < image_rgb.cols; j++)
            {
              uchar *prt_rgb = image_rgb.ptr(i);
              uchar *prt_show = show.ptr(i);
              // Offset into the mapping array
              long index = i * 640 + j;
              // Mapped point for this color pixel
              NUI_DEPTH_IMAGE_POINT depthPointAtIndex = depthPoints[index];
 
              // Bounds check on the mapped depth coordinates
              if (depthPointAtIndex.x >= 0 && depthPointAtIndex.x < image_depth.cols &&
                depthPointAtIndex.y >=0 && depthPointAtIndex.y < image_depth.rows)
              {
                // Depth test: pixels between MIN_DISTANCE and MAX_DISTANCE count as foreground
                // Important: use the depth carried by the mapped point; looking the
                // value back up in the depth image here would give wrong results
                if (depthPointAtIndex.depth >= MIN_DISTANCE && depthPointAtIndex.depth <= MAX_DISTANCE)
                {
                  prt_show[3 * j]   = prt_rgb[j * 3];
                  prt_show[3 * j + 1] = prt_rgb[j * 3 + 1];
                  prt_show[3 * j + 2] = prt_rgb[j * 3 + 2];
                }
              }
            }
          }
          imshow("show", show);
        }
 
        delete []depthPoints;

        pTexture->UnlockRect(0);
        // Also unlock the mapped-depth texture obtained above
        pColorToDepthTexture->UnlockRect(0);
        m_pNuiSensor->NuiImageStreamReleaseFrame(depthStreamHandle, &pImageFrame_depth);
      }
 
      else
      {
        cout<<"Did not receive a depth frame"<<endl;
      }
    }
 
    if (waitKey(20) == 27) // exit on Esc
      break;
  }
  return 0;
}
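
To build this in VS2010, the project also has to link against the SDK and OpenCV. The fragment below is a minimal sketch of one way to do that with MSVC pragmas instead of project settings; the library names assume the Kinect for Windows SDK v1.x (Kinect10.lib) and a stock OpenCV 2.4.10 install, and the SDK and OpenCV include/lib directories still need to be on the project's search paths:

// Link the Kinect NUI runtime (Kinect for Windows SDK v1.x)
#pragma comment(lib, "Kinect10.lib")
// Link the OpenCV 2.4.10 modules this sample uses (release builds;
// append 'd' for debug, e.g. opencv_core2410d.lib)
#pragma comment(lib, "opencv_core2410.lib")
#pragma comment(lib, "opencv_highgui2410.lib")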

