DUO Dense3D Samples


  Developer Preview - This may change with updates.

Overview


This article is a walkthrough of the C++ Dense3D samples that ship with the DUO SDK. It will be updated as new features are added to the DUO API or its specifications change. You can find the latest version of these samples in the Developers/Samples folder of the SDK download, together with a CMake project that generates IDE-specific projects (Visual Studio/Qt Creator).


Prerequisites


Before reading this guide, we recommend reviewing our API and SDK Docs to get an understanding of the design, common practices and usage. With DUO devices you always receive a "frame" that contains all relevant sensor data. The following examples showcase the DUOFrame structure in combination with the various device functions, as sketched below. Also make sure you have the latest SDK download.
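
As a quick orientation, here is a minimal sketch (not part of the shipped samples) of a frame callback reading the DUOFrame fields that the samples below rely on:

#include <cstdio>
#include <DUOLib.h>

// Minimal sketch of a frame callback: every frame carries the image size and
// the raw 8-bit grayscale left/right buffers used throughout the samples
static void CALLBACK MyDUOCallback(const PDUOFrame pFrameData, void *pUserData)
{
    printf("Frame %dx%d, left=%p right=%p\n",
           (int)pFrameData->width, (int)pFrameData->height,
           (void*)pFrameData->leftData, (void*)pFrameData->rightData);
}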

You should also have an understanding of the DUO Dense3D API, which provides a high-level processor that generates a depth map from the stereo image frames. In addition you will need:

  • DUOLib.lib & DUOLib dynamic library that match your compiler and architecture (x86/x64)
  • DUO Dense3D Libraries
  • OpenCV v2.4.7+
  • CMake 2.8+

CMake


The samples come with a CMake (2.8+) project in the Developers/Samples folder; use it to generate a project for your IDE of choice (Visual Studio/Qt Creator) and to build the samples against the DUOLib, Dense3D and OpenCV dependencies.

Methods/Structures


Here are some of the Device API methods used throughout the examples:

  • OpenDUO / CloseDUO - Open and close the DUO device.
  • EnumerateDUOResolutions / SetDUOResolutionInfo - Query and select the capture resolution, binning and frame rate.
  • StartDUO / StopDUO - Start and stop frame capture; frames are delivered to the DUO callback.
  • GetDUODeviceName / GetDUOSerialNumber / GetDUOFirmwareVersion / GetDUOFirmwareBuild - Query device information.
  • SetDUOExposure / SetDUOGain / SetDUOLedPWM / SetDUOVFlip / SetDUOCameraSwap / SetDUOUndistort - Configure imaging parameters.
  • GetDUOStereoParameters - Retrieve the stereo calibration (intrinsics and extrinsics).

And the Dense3D API-specific methods:

  • Dense3DOpen / Dense3DClose - Create and destroy a Dense3D (or Dense3DMT) instance.
  • SetDense3DLicense - Set the Dense3D license key.
  • SetDense3DImageSize / SetDense3DImageInfo - Set the processed image size (and frame rate for Dense3DMT).
  • SetDense3DScale, SetDense3DMode, SetDense3DCalibration, SetDense3DNumDisparities, SetDense3DSADWindowSize, SetDense3DPreFilterCap, SetDense3DUniquenessRatio, SetDense3DSpeckleWindowSize, SetDense3DSpeckleRange - Configure the stereo matcher.
  • Dense3DGetDepth - Compute the disparity and depth maps for a pair of frames.
  • Dense3DStart / Dense3DStop - Start and stop asynchronous capture and processing (Dense3DMT).


Include/Linkage


Including and linking the DUOLib and Dense3D libraries (the linkage step may vary depending on your compiler):

#include "DUOLib.h"
#include "Dense3D.h"
#pragma comment(lib, "DUOLib.lib")
#pragma comment(lib, "Dense3DLib.lib")

Sample 01


Generating a depth map with Dense3D Middleware



In this sample we showcase how to extract depth via our Dense3D API and libraries.

Here are some of the key Dense3D functions/structures:

  • Dense3DOpen - Initializes the Dense3D processor.
  • Dense3DGetDepth - Returns a depth map based on processor results.
  • GetDense3DNumDisparities - Returns the current number of disparities.
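
Before the full listing, here is a condensed sketch of the Dense3D call sequence the sample follows. It assumes the OpenDUOCamera/GetDUOFrame/GetCameraStereoParameters helpers defined in Sample.h below and the Dense3D.h include from Sample.cpp; error handling and display are omitted:

static bool RunDense3DOnce(int width, int height)
{
    Dense3DInstance dense3d;
    if(!Dense3DOpen(&dense3d))                                   // create the Dense3D processor
        return false;
    SetDense3DLicense(dense3d, "XXXXX-XXXXX-XXXXX-XXXXX-XXXXX"); // your license key
    SetDense3DImageSize(dense3d, width, height);                 // must match the DUO resolution

    DUO_STEREO stereo;                                           // feed the DUO calibration
    GetCameraStereoParameters(&stereo);
    SetDense3DCalibration(dense3d, &stereo);
    SetDense3DNumDisparities(dense3d, 4);                        // plus the remaining matcher parameters

    Mat1f disparity(height, width);                              // outputs: disparity and XYZ depth
    Mat3f depth3d(height, width);
    PDUOFrame pFrameData = GetDUOFrame();
    bool ok = pFrameData && Dense3DGetDepth(dense3d,
                  pFrameData->leftData, pFrameData->rightData,
                  (float*)disparity.data, (PDense3DDepth)depth3d.data);

    Dense3DClose(dense3d);                                       // release the processor
    return ok;
}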

Sample.h


#ifndef SAMPLE_H
#define SAMPLE_H

// Include some generic header files
...

// Include DUO API header file
#include <DUOLib.h>

#include <opencv2/opencv.hpp>
using namespace cv;

// Some global variables
static DUOInstance _duo = NULL;
static PDUOFrame _pFrameData = NULL;

...

// One and only duo callback function
// It sets the current frame data and signals that the new frame data is ready
static void CALLBACK DUOCallback(const PDUOFrame pFrameData, void *pUserData)
{
    _pFrameData = pFrameData;
    SetEvent(_evFrame);
}

// Opens, sets current image format and fps and starts capturing
static bool OpenDUOCamera(int width, int height, float fps)
{
    if(_duo != NULL)
    {
        // Stop capture
        StopDUO(_duo);
        // Close DUO
        CloseDUO(_duo);
        _duo = NULL;
    }

    // Find optimal binning parameters for given (width, height)
    // This maximizes sensor imaging area for given resolution
    int binning = DUO_BIN_NONE;
    if(width <= 752/4) 
        binning += DUO_BIN_HORIZONTAL4;
    else if(width <= 752/2) 
        binning += DUO_BIN_HORIZONTAL2;
    if(height <= 480/4) 
        binning += DUO_BIN_VERTICAL4;
    else if(height <= 480/2) 
        binning += DUO_BIN_VERTICAL2;

    // Check if we support given resolution (width, height, binning, fps)
    DUOResolutionInfo ri;
    if(!EnumerateDUOResolutions(&ri, 1, width, height, binning, fps))
        return false;

    if(!OpenDUO(&_duo))
        return false;

    char tmp[260];
    // Get and print some DUO parameter values
    GetDUODeviceName(_duo,tmp);
    printf("DUO Device Name:      '%s'\n", tmp);
    GetDUOSerialNumber(_duo, tmp);
    printf("DUO Serial Number:    %s\n", tmp);
    GetDUOFirmwareVersion(_duo, tmp);
    printf("DUO Firmware Version: v%s\n", tmp);
    GetDUOFirmwareBuild(_duo, tmp);
    printf("DUO Firmware Build:   %s\n", tmp);

    // Set selected resolution
    SetDUOResolutionInfo(_duo, ri);

    // Start capture
    if(!StartDUO(_duo, DUOCallback, NULL))
        return false;
    return true;
}

// Waits until the new DUO frame is ready and returns it
static PDUOFrame GetDUOFrame()
{
    if(_duo == NULL) 
        return NULL;
    if(WaitForSingleObject(_evFrame, 1000) == WAIT_OBJECT_0)
        return _pFrameData;
    else
        return NULL;
}

// Stops capture and closes the camera
static void CloseDUOCamera()
{
    if(_duo == NULL)
        return;
    // Stop capture
    StopDUO(_duo);
    // Close DUO
    CloseDUO(_duo);
    _duo = NULL;
}

static void SetExposure(float value)
{
    if(_duo == NULL)
        return;
    SetDUOExposure(_duo, value);
}

static void SetGain(float value)
{
    if(_duo == NULL)
        return;
    SetDUOGain(_duo, value);
}

static void SetLed(float value)
{
    if(_duo == NULL)
        return;
    SetDUOLedPWM(_duo, value);
}

static void SetVFlip(bool value)
{
    if(_duo == NULL)
        return;
    SetDUOVFlip(_duo, value);
}

static void SetCameraSwap(bool value)
{
    if(_duo == NULL)
        return;
    SetDUOCameraSwap(_duo, value);
}

static void SetUndistort(bool value)
{
    if(_duo == NULL)
        return;
    SetDUOUndistort(_duo, value);
}

static bool GetCameraStereoParameters(DUO_STEREO *stereo)
{
    if(_duo == NULL)
        return false;
    return GetDUOStereoParameters(_duo, stereo);
}

#endif // SAMPLE_H

Sample.cpp


Since the header file contains mainly helper functions and includes, the actual implementation (Sample.cpp) goes through the following steps:

  • Step 1 - Create DUO and Dense3D Instances
  • Step 2 - Configure DUO and Dense3D Parameters
  • Step 3 - Get DUO Frame (Left and Right Image Data) and pass it to Dense3D
  • Step 4 - Render the disparity map using the color lookup table

#include "Sample.h"
// Include Dense3D API header file
#include <Dense3D.h>

#define WIDTH    320
#define HEIGHT    240
#define FPS        30

// Hue map for colorizing the disparity map
Vec3b HSV2RGB(float hue, float sat, float val)
{
    float x, y, z;

    if(hue == 1) hue = 0;
    else         hue *= 6;

    int i = static_cast<int>(floorf(hue));
    float f = hue - i;
    float p = val * (1 - sat);
    float q = val * (1 - (sat * f));
    float t = val * (1 - (sat * (1 - f)));

    switch(i)
    {
        case 0: x = val; y = t; z = p; break;
        case 1: x = q; y = val; z = p; break;
        case 2: x = p; y = val; z = t; break;
        case 3: x = p; y = q; z = val; break;
        case 4: x = t; y = p; z = val; break;
        case 5: x = val; y = p; z = q; break;
    }
    return Vec3b((uchar)(x * 255), (uchar)(y * 255), (uchar)(z * 255));
}

int main(int argc, char* argv[])
{
    // Build color lookup table for disparity display
    Mat colorLut = Mat(cv::Size(256, 1), CV_8UC3);
    for(int i = 0; i < 256; i++)
        colorLut.at<Vec3b>(i) = (i==0) ? Vec3b(0, 0, 0) : HSV2RGB(i/256.0f, 1, 1);

    // Open DUO camera and start capturing
    if(!OpenDUOCamera(WIDTH, HEIGHT, FPS))
    {
        printf("Could not open DUO camera\n");
        return 0;
    }

    printf("DUOLib Version:       v%s\n", GetDUOLibVersion());
    printf("Dense3D Version:      v%s\n", Dense3DGetLibVersion());

    // Open Dense3D
    Dense3DInstance dense3d;
    if(!Dense3DOpen(&dense3d))
    {
        printf("Could not open Dense3D library\n");
        // Close DUO camera
        CloseDUOCamera();
        return 0;
    }
    // Set the Dense3D license (visit https://duo3d.com/account)
    if(!SetDense3DLicense(dense3d, "XXXXX-XXXXX-XXXXX-XXXXX-XXXXX")) // <-- Put your Dense3D license
    {
        printf("Invalid or missing Dense3D license. To get your license visit https://duo3d.com/account\n");
        // Close DUO camera
        CloseDUOCamera();
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 0;
    }
    // Set the image size
    if(!SetDense3DImageSize(dense3d, WIDTH, HEIGHT))
    {
        printf("Invalid image size\n");
        // Close DUO camera
        CloseDUOCamera();
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 0;
    }
    // Get DUO calibration intrinsics and extrinsics
    DUO_STEREO params;
    if(!GetCameraStereoParameters(&params))
    {
        printf("Could not get DUO camera calibration data\n");
        // Close DUO camera
        CloseDUOCamera();
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 1;
    }
    // Set Dense3D parameters
    SetDense3DScale(dense3d, 3);
    SetDense3DMode(dense3d, 0);
    SetDense3DCalibration(dense3d, &params);
    SetDense3DNumDisparities(dense3d, 4);
    SetDense3DSADWindowSize(dense3d, 6);
    SetDense3DPreFilterCap(dense3d, 28);
    SetDense3DUniquenessRatio(dense3d, 27);
    SetDense3DSpeckleWindowSize(dense3d, 52);
    SetDense3DSpeckleRange(dense3d, 14);

    // Set exposure, LED brightness and camera orientation
    SetExposure(85);
    SetLed(25);
    SetVFlip(true);
    // Enable retrieval of undistorted (rectified) frames
    SetUndistort(true);

    // Create Mat for disparity and depth map
    Mat1f disparity = Mat(Size(WIDTH, HEIGHT), CV_32FC1);
    Mat3f depth3d = Mat(Size(WIDTH, HEIGHT), CV_32FC3);

    // Run capture loop until the ESC key is pressed
    while((cvWaitKey(1) & 0xff) != 27)
    {
        // Capture DUO frame
        PDUOFrame pFrameData = GetDUOFrame();
        if(pFrameData == NULL) continue;

        // Create Mat for left & right frames
        Mat left = Mat(Size(WIDTH, HEIGHT), CV_8UC1, pFrameData->leftData);
        Mat right = Mat(Size(WIDTH, HEIGHT), CV_8UC1, pFrameData->rightData);

        // Process Dense3D depth map here
        if(Dense3DGetDepth(dense3d, pFrameData->leftData, pFrameData->rightData, 
                          (float*)disparity.data, (PDense3DDepth)depth3d.data))
        {
            uint32_t disparities;
            GetDense3DNumDisparities(dense3d, &disparities);
            Mat disp8;
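            // Scale the floating point disparity so that the maximum expected value
            // (disparities * 16) maps to 255 for display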
            disparity.convertTo(disp8, CV_8UC1, 255.0/(disparities*16));
            Mat mRGBDepth;
            cvtColor(disp8, mRGBDepth, COLOR_GRAY2BGR);
            LUT(mRGBDepth, colorLut, mRGBDepth);
            imshow("Dense3D Disparity Map", mRGBDepth);
        }
        // Display images
        imshow("Left Image", left);
        imshow("Right Image", right);
    }
    destroyAllWindows();

    // Close Dense3D library
    Dense3DClose(dense3d);
    // Close DUO camera
    CloseDUOCamera();
    return 0;
}

Sample 02


Generating a depth map with Dense3DMT Middleware



In this sample we showcase how to extract depth via our Dense3DMT API and libraries.

Here are some of the key Dense3DMT functions/structures (a condensed sketch of the call sequence follows the list):

  • Dense3DOpen - Initializes the Dense3DMT processor and opens the associated DUO device.
  • GetDUOInstance - Returns the DUOInstance associated with the Dense3DMT processor.
  • SetDense3DImageInfo - Sets the image size and frame rate.
  • Dense3DStart - Starts DUO capture and Dense3D processing; results are delivered asynchronously via a callback.
  • Dense3DStop - Stops capture and processing.
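
Here is a condensed sketch of that call sequence. It is a simplification that assumes the WIDTH/HEIGHT/FPS defines and includes from the listings below; the trivial callback only counts frames, whereas the full sample packages them into a Dense3DFrameQueue:

static int g_frameCount = 0;

static bool RunDense3DMT()
{
    Dense3DMTInstance dense3d;
    if(!Dense3DOpen(&dense3d))                           // opens Dense3DMT and its DUO device
        return false;
    DUOInstance duo = GetDUOInstance(dense3d);           // configure the camera through this handle
    SetDUOExposure(duo, 85);

    SetDense3DLicense(dense3d, "XXXXX-XXXXX-XXXXX-XXXXX-XXXXX");
    SetDense3DImageInfo(dense3d, WIDTH, HEIGHT, FPS);    // image size and frame rate
    // (set the Dense3DParams stereo-matcher parameters here, as in the full listing)

    // Capture and processing run asynchronously; results arrive in the callback
    if(!Dense3DStart(dense3d, [](const PDense3DFrame pFrameData, void *pUserData)
    {
        (void)pFrameData;
        ++*(int*)pUserData;                              // just count the frames
    }, &g_frameCount))
    {
        Dense3DClose(dense3d);
        return false;
    }

    // ... run the main loop, then shut down ...
    Dense3DStop(dense3d);
    Dense3DClose(dense3d);
    return true;
}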

Sample.h


#ifndef SAMPLE_H
#define SAMPLE_H

#include <mutex>
#include <queue>

// Include DUO API header file
#include <DUOLib.h>
// Include Dense3D API header file
#include <Dense3DMT.h>

#include <opencv2/opencv.hpp>
using namespace cv;

typedef struct
{
    Mat leftImg, rightImg;
    Mat disparity;
    Mat depth;
}D3DFrame;

// This class is responsible for queueing Dense3D frames
// since Dense3DMT uses asynchronous callback mechanism to
// pass the Dense3D frames to us
class Dense3DFrameQueue 
{
    std::mutex mutex;
    std::queue<D3DFrame> queue;
public:
    void push(D3DFrame frame)
    {
        mutex.lock();
        queue.push(frame);
        mutex.unlock();
    }
    bool pop(D3DFrame &frame)
    {
        mutex.lock();
        if(queue.size() == 0)
        {
            mutex.unlock();
            return false;
        }
        frame = queue.front();
        queue.pop();
        mutex.unlock();
        return true;
    }
};
#endif // SAMPLE_H

Sample.cpp


To implement simple DUO disparity processing in this code sample, we go through the following steps:

  • Step 1 - Create DUO and Dense3D Instances
  • Step 2 - Configure DUO and Dense3D Parameters
  • Step 3 - Start DUO and Dense3D capturing and processing
  • Step 4 - Render the disparity map using the color lookup table

#include "Sample.h"

#define WIDTH    320
#define HEIGHT    240
#define FPS        30

// Hue map for colorizing the disparity map
Vec3b HSV2RGB(float hue, float sat, float val)
{
    float x, y, z;

    if(hue == 1) hue = 0;
    else         hue *= 6;

    int i = static_cast<int>(floorf(hue));
    float f = hue - i;
    float p = val * (1 - sat);
    float q = val * (1 - (sat * f));
    float t = val * (1 - (sat * (1 - f)));

    switch(i)
    {
        case 0: x = val; y = t; z = p; break;
        case 1: x = q; y = val; z = p; break;
        case 2: x = p; y = val; z = t; break;
        case 3: x = p; y = q; z = val; break;
        case 4: x = t; y = p; z = val; break;
        case 5: x = val; y = p; z = q; break;
    }
    return Vec3b((uchar)(x * 255), (uchar)(y * 255), (uchar)(z * 255));
}

int main(int argc, char* argv[])
{
    // Build color lookup table for disparity display
    Mat colorLut = Mat(cv::Size(256, 1), CV_8UC3);
    for(int i = 0; i < 256; i++)
        colorLut.at<Vec3b>(i) = (i==0) ? Vec3b(0, 0, 0) : HSV2RGB(i/256.0f, 1, 1);

    // Open Dense3D
    Dense3DMTInstance dense3d;
    if(!Dense3DOpen(&dense3d))
    {
        printf("Could not open Dense3DMT library\n");
        return 1;
    }

    // Get associated DUOInstance
    DUOInstance duo = GetDUOInstance(dense3d);

    char tmp[260];
    GetDUODeviceName(duo,tmp);
    printf("DUO Device Name:      '%s'\n", tmp);
    GetDUOSerialNumber(duo, tmp);
    printf("DUO Serial Number:    %s\n", tmp);
    GetDUOFirmwareVersion(duo, tmp);
    printf("DUO Firmware Version: v%s\n", tmp);
    GetDUOFirmwareBuild(duo, tmp);
    printf("DUO Firmware Build:   %s\n", tmp);
    printf("DUOLib Version:       v%s\n", GetLibVersion());
    printf("Dense3DMT Version:    v%s\n", Dense3DGetLibVersion());

    // Set the Dense3D license (visit https://duo3d.com/account)
    if(!SetDense3DLicense(dense3d, "XXXXX-XXXXX-XXXXX-XXXXX-XXXXX")) // <-- Put your Dense3D license
    {
        printf("Invalid or missing Dense3D license. To get your license visit https://duo3d.com/account\n");
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 1;
    }
    // Set the image size
    if(!SetDense3DImageInfo(dense3d, WIDTH, HEIGHT, FPS))
    {
        printf("Invalid image size\n");
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 1;
    }

    // Set Dense3D parameters
    Dense3DParams params;
    params.scale = 0;
    params.mode = 0;
    params.numDisparities = 2;
    params.sadWindowSize = 6;
    params.preFilterCap = 28;
    params.uniqenessRatio = 27;
    params.speckleWindowSize = 52;
    params.speckleRange = 14;
    if(!SetDense3Params(dense3d, params))
    {
        printf("GetDense3Params error\n");
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 1;
    }

    // Queue used to receive Dense3D frames
    Dense3DFrameQueue d3dq;

    // Start DUO capture and Dense3D processing
    if(!Dense3DStart(dense3d, [](const PDense3DFrame pFrameData, void *pUserData)
                    {
                        D3DFrame frame;
                        Size frameSize(pFrameData->duoFrame->width, pFrameData->duoFrame->height);
                        frame.leftImg = Mat(frameSize, CV_8U, pFrameData->duoFrame->leftData);
                        frame.rightImg = Mat(frameSize, CV_8U, pFrameData->duoFrame->rightData);
                        frame.disparity = Mat(frameSize, CV_32F, pFrameData->disparityData);
                        frame.depth = Mat(frameSize, CV_32FC3, pFrameData->depthData);
                        ((Dense3DFrameQueue*)pUserData)->push(frame);
                    }, &d3dq))
    {
        printf("Dense3DStart error\n");
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 1;
    }

    // Set exposure, LED brightness and camera orientation
    SetDUOExposure(duo, 85);
    SetDUOLedPWM(duo, 28);
    SetDUOVFlip(duo, true);

    // Run capture loop until the ESC key is pressed
    while((cvWaitKey(1) & 0xff) != 27)
    {
        D3DFrame d3DFrame;
        if(!d3dq.pop(d3DFrame))
            continue;

        Mat disp8;
        d3DFrame.disparity.convertTo(disp8, CV_8UC1, 255.0/(params.numDisparities*16));
        Mat rgbBDisparity;
        cvtColor(disp8, rgbBDisparity, COLOR_GRAY2BGR);
        LUT(rgbBDisparity, colorLut, rgbBDisparity);

        // Display images
        imshow("Left Image", d3DFrame.leftImg);
        imshow("Right Image", d3DFrame.rightImg);
        imshow("Disparity Image", rgbBDisparity);
    }
    destroyAllWindows();

    Dense3DStop(dense3d);
    Dense3DClose(dense3d);
    return 0;
}

Sample 03


Rendering 3D point cloud with Dense3DMT Middleware and OpenGL/GLUT



In this sample we showcase how to generate a 3D point cloud from the Dense3DMT depth output and render it with OpenGL/GLUT.

Here are some of the key Dense3D Point Cloud functions/structures:

  • Dense3DOpen - Initializes the Dense3D processor.
  • Dense3DStart - Starts DUO and Dense3D capture and processing.
  • CloudViewer - Displays 3D point cloud in OpenGL window.

Sample.h


This file contains a minimal implementation of the 3D point cloud viewer used in this code sample.

#ifndef SAMPLE_H
#define SAMPLE_H

#if defined(WIN32)
#include "GL/glut.h"
#elif defined(__linux__)
#include <GL/glut.h>
#elif defined(__APPLE__)
#include <GLUT/glut.h>
#endif

#include <functional>
#include <mutex>
#include <queue>
#include <vector>

// Include DUO API header file
#include <DUOLib.h>
// Include Dense3D API header file
#include <Dense3DMT.h>

#include <opencv2/opencv.hpp>
using namespace cv;

namespace cl 
{
    class Vec
    {
    public:
        union
        {
            struct { double x, y, z; };
            double v_[3];
        };
    public:
        Vec() : x(0.0), y(0.0), z(0.0) {}
        Vec(double X, double Y, double Z) : x(X), y(Y), z(Z) {}
        Vec(cv::Vec3b v) : x(v.val[0]), y(v.val[1]), z(v.val[2]) {}

        Vec& operator=(const Vec& v)
        {
            x = v.x;   y = v.y;   z = v.z;
            return *this;
        }
        double operator[](int i) const { return v_[i]; }
        double& operator[](int i) {    return v_[i]; }
        operator const double*() const { return v_;    }
        operator double*() { return v_; }

        friend Vec operator+(const Vec &a, const Vec &b)
        {
            return Vec(a.x + b.x, a.y + b.y, a.z + b.z);
        }
        friend Vec operator-(const Vec &a, const Vec &b)
        {
            return Vec(a.x - b.x, a.y - b.y, a.z - b.z);
        }
        friend Vec operator-(const Vec &a)
        {
            return Vec(-a.x, -a.y, -a.z);
        }
        friend Vec operator*(const Vec &a, double k)
        {
            return Vec(a.x * k, a.y * k, a.z * k);
        }
        friend Vec operator*(double k, const Vec &a)
        {
            return a * k;
        }
        friend Vec operator/(const Vec &a, double k)
        {
            return Vec(a.x / k, a.y / k, a.z / k);
        }
        friend bool operator!=(const Vec &a, const Vec &b)
        {
            return !(a==b);
        }
        friend bool operator==(const Vec &a, const Vec &b)
        {
            const double epsilon = 1.0E-10f;
            return (a-b).squaredNorm() < epsilon;
        }
        Vec& operator+=(const Vec &a)
        {
            x += a.x; y += a.y; z += a.z;
            return *this;
        }
        Vec& operator-=(const Vec &a)
        {
            x -= a.x; y -= a.y; z -= a.z;
            return *this;
        }
        Vec& operator*=(double k)
        {
            x *= k; y *= k; z *= k;
            return *this;
        }
        Vec& operator/=(double k)
        {
            x /= k; y /= k; z /= k;
            return *this;
        }
        friend double operator*(const Vec &a, const Vec &b)
        {
            return a.x * b.x + a.y * b.y + a.z * b.z;
        }
        friend Vec operator^(const Vec &a, const Vec &b)
        {
            return cross(a, b);
        }
        friend Vec cross(const Vec &a, const Vec &b)
        {
            return Vec(a.y * b.z - a.z * b.y,
                a.z * b.x - a.x * b.z,
                a.x * b.y - a.y * b.x);
        }
        double squaredNorm() const { return x * x + y * y + z * z; }
        double norm() const { return sqrt(x * x + y * y + z * z); }
        double normalize()
        {
            const double n = norm();
            *this /= n;
            return n;
        }
        void rotate(double angle, Vec axis)
        {
            double rad = angle * (3.14159265358979323846 / 180.0);
            double cc = cos(rad);
            double ss = sin(rad);
            double a = axis.x * axis.x + (1 - axis.x * axis.x) * cc;
            double b = axis.x * axis.y * (1 - cc) - axis.z * ss;
            double c = axis.x * axis.z * (1 - cc) + axis.y * ss;
            double d = axis.x * axis.y * (1 - cc) + axis.z * ss;
            double e = axis.y * axis.y + (1 - axis.y * axis.y) * cc;
            double f = axis.y * axis.z * (1 - cc) - axis.x * ss;
            double g = axis.x * axis.z * (1 - cc) - axis.y * ss;
            double h = axis.y * axis.z * (1 - cc) + axis.x * ss;
            double i = axis.z * axis.z + (1 - axis.z * axis.z) * cc;
            double nx = x * a + y * b + z * c;
            double ny = x * d + y * e + z * f;
            double nz = x * g + y * h + z * i;
            x = nx;
            y = ny;
            z = nz;
        }
        static double angle(Vec a, Vec c, Vec b)
        {
            double s = acos((a-c) * (c-b) / ((a-c).norm() * (c-b).norm()));
            return s * (180.0 / 3.14159265358979323846);
        }
    };

    class TrackballCamera
    {
        Vec _position, _lookAt, _forward, _up, _left;
        double _angleX;
    public:
        TrackballCamera() {}
        TrackballCamera(Vec position, Vec lookat)
        {
            _position = position;
            _lookAt = lookat;
            _angleX = 0.0;
            update();
        }
        void update()
        {
            _forward = _lookAt - _position;
            _left = Vec(_forward.z, 0, -_forward.x);
            _up = cross(_left, _forward);
            _forward.normalize();
            _left.normalize();
            _up.normalize();
        }
        void show()
        {
            gluLookAt(_position.x, _position.y, _position.z,
                _lookAt.x, _lookAt.y, _lookAt.z,
                0.0, 1.0, 0.0);
        }
        void rotate(double pos, Vec v)
        {
            Vec prevPos = _position;
            translate(-_lookAt);
            _position.rotate(pos / 500.0, v);
            translate(_lookAt);
            updateAngleX();
            if(_angleX < 5 || _angleX > 175)
            {
                _position = prevPos;
                updateAngleX();
            }
        }
        void translate(Vec v)
        {
            _position += v;
        }
        void translateLookAt(Vec v)
        {
            _lookAt += v;
        }
        void translateAll(Vec v)
        {
            translate(v);
            translateLookAt(v);
        }
        void zoom(double z)
        {
            double dist = (_position - _lookAt).norm();
            if (dist - z > z)
                translate(_forward * z);
        }
        Vec getPosition() { return _position; }
        Vec getPositionFromLookAt() { return _position - _lookAt; }
        Vec getLookAt() { return _lookAt; }
        Vec getForward() { return _forward; }
        Vec getUp() { return _up; }
        Vec getLeft() { return _left; }
        void setPosition(Vec p) { _position = p; updateAngleX(); }
        void setLookAt(Vec p) { _lookAt = p; updateAngleX(); }
    private:
        void updateAngleX()
        {
            _angleX = Vec::angle(Vec(_position.x, _position.y + 1, _position.z), _position, _lookAt);
        }
    };

    struct PointXYZRGB
    {
        double x, y, z;
        double r, g, b;
        PointXYZRGB() : x(0), y(0), z(0), r(0), g(0), b(0) {}
        PointXYZRGB(double x_, double y_, double z_) : x(x_), y(y_), z(z_), r(0), g(0), b(0) {}
        PointXYZRGB(double x_, double y_, double z_, double r_, double g_, double b_) : x(x_), y(y_), z(z_), r(r_), g(g_), b(b_) {}
        PointXYZRGB(Vec v, double r_, double g_, double b_) : x(v.x), y(v.y), z(v.z), r(r_), g(g_), b(b_) {}
        PointXYZRGB(Vec v, double c) : x(v.x), y(v.y), z(v.z), r(c), g(c), b(c) {}
    };

    class CloudViewer
    {
        std::mutex _cloudMx;
        std::vector<PointXYZRGB> _cloud;
        bool _rotate, _translate, _zoom;
        int _startx, _starty;
        TrackballCamera _camera;
        GLfloat _pointSize;
        double _fovX, _fovY;
        char _title[256];
        std::function<void()> _idle;
        std::function<void()> _exit;
    public:
        static CloudViewer *_this;
    public:
        CloudViewer(const char *title = "DUO 3D Viewer")
        {
            strcpy(_title, title);
            _this = this;
            _camera = TrackballCamera(Vec(0.0, 0.0, 0.5), Vec(0.0, 0.0, 0.0));
            _pointSize = 3;
            _translate = _rotate = _zoom = false;
        }
        virtual ~CloudViewer() {}
        void addData(const Mat1b &image, const Mat3f &depth)
        {
            _cloudMx.lock();
            _cloud.clear();
            for(int y = 0; y < image.rows; y++)
                for(int x = 0; x < image.cols; x++)
                {
                    Point p = Point(x, y);
                    double c = image.at<uchar>(p) / 255.0;
                    if(c == 0) continue;
                    Vec v(depth.at<Vec3f>(p)[0], depth.at<Vec3f>(p)[1], depth.at<Vec3f>(p)[2]);
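                    // Keep only points closer than 10000 depth units (assumed to be
                    // millimeters) and convert them to meters for display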
                    if(v.z < 10000)
                    {
                        v /= 1000.0;
                        _cloud.push_back(PointXYZRGB(v, c));
                    }
                }
            _cloudMx.unlock();
        }
        void setFov(double fovX, double fovY)
        {
             _fovX = fovX / 180.0 * 3.14159265358979323846;
             _fovY = fovY / 180.0 * 3.14159265358979323846;
        }
        void onIdle(std::function<void()> idle)
        {
            _idle = idle;
        }
        void onExit(std::function<void()> exit)
        {
            _exit = exit;
        }
        void run()
        {
            init();
            atexit([](){ _this->exit(); });
            glutMainLoop();
        }
    private:
        void init()
        {
            int argc = 1;
            char *argv[1] = { _title };
            glutInit(&argc, argv);
            glutInitWindowSize(640, 480);
            glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH);
            glutCreateWindow(_title);
            glEnable(GL_DEPTH_TEST);
            glMatrixMode(GL_PROJECTION);
            gluPerspective(75.0, 1.0, 0.001, 20000.0);
            glMatrixMode(GL_MODELVIEW);
            gluLookAt(0.0, 0.0, 6.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.0);
            glShadeModel(GL_SMOOTH);
            glDepthFunc(GL_LEQUAL);
            glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
            glutDisplayFunc([](){ if(glutGetWindow()) _this->draw(); });
            glutMouseFunc([](int button, int state, int x, int y){ if(glutGetWindow()) _this->mouse(button, state, x, y); });
            glutKeyboardFunc([](unsigned char c, int x, int y){ if(glutGetWindow()) _this->key(c, x, y); });
            glutMotionFunc([](int x, int y){ if(glutGetWindow()) _this->motion(x, y); });
            glutReshapeFunc([](int width, int height){ if(glutGetWindow()) _this->reshape(width, height); });
            glutIdleFunc([](){ _this->idle(); });
            glClearDepth(1.0);
            glEnable(GL_DEPTH_TEST);
            glDepthFunc(GL_LEQUAL);
            glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
        }
        void idle()
        {
            if(_idle)
            {
                _idle();
                if(glutGetWindow()) 
                    glutPostRedisplay();
            }
            if(glutGetWindow() == 0)
                _exit();
        }
        void exit()
        {
            if(_exit) 
                _exit(); 
        }
        void draw()
        {
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
            glLoadIdentity();
            glPushMatrix();
            _camera.update();
            _camera.show();
            glEnable(GL_BLEND);
            glDisable(GL_LIGHTING);
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            glClearColor(0.12f, 0.12f, 0.12f, 1.0f);
            drawPointCloud();
            drawOrigin(_camera.getLookAt());
            drawFOV();
            glColor4d(0, 0, 1, 0.75);
            glutSolidCube(0.05);
            glPopMatrix();
            glutSwapBuffers();
        }
        void mouse(int button, int state, int x, int y)
        {
            if (button == GLUT_LEFT_BUTTON)
            {
                if (state == GLUT_DOWN)
                {
                    _rotate = true;
                    _startx = x;
                    _starty = y;
                }
                if (state == GLUT_UP)
                    _rotate = false;
            }
            else if (button == GLUT_RIGHT_BUTTON)
            {
                if (state == GLUT_DOWN)
                {
                    _translate = true;
                    _startx = x;
                    _starty = y;
                }
                if (state == GLUT_UP)
                    _translate = false;
            }
            else if (button == GLUT_MIDDLE_BUTTON)
            {
                if (state == GLUT_DOWN)
                {
                    _zoom = true;
                    _startx = x;
                    _starty = y;
                }
                if (state == GLUT_UP)
                    _zoom = false;
            }
            // Wheel reports as button 3(scroll up) and button 4(scroll down)
            else if ((button == 3) || (button == 4))
            {
                if (state == GLUT_UP)
                    return;
                if (button == 3)
                    _camera.zoom(0.1);
                else
                    _camera.zoom(-0.1);
            }
        }
        void key(unsigned char c, int x, int y)
        {
            switch (c)
            {
                case '-':
                    if (_pointSize > 0)
                        _pointSize--;
                break;
                case '+':
                    _pointSize++;
                break;
                case ' ':
                    _camera.setPosition(Vec(0.0, 0.0, 0.5));
                    _camera.setLookAt(Vec(0.0, 0.0, 0.0));
                break;
                default:
                break;
            }
            glutPostRedisplay();
        }
        void motion(int x, int y)
        {
            double sensitivity = 100.0;
            if (_translate)
            {
                Vec left = _camera.getLeft();
                Vec up = _camera.getUp();
                _camera.translateAll(left * (x - _startx) / sensitivity);
                _camera.translateAll(up * -(y - _starty) / sensitivity);
                _startx = x;
                _starty = y;
            }
            if (_zoom)
            {
                _camera.zoom((y - _starty) / 5.0);
                _starty = y;
            }
            if (_rotate)
            {
                double rot = y - _starty;
                Vec tmp = Vec(-_camera.getPositionFromLookAt().z, 0, _camera.getPositionFromLookAt().x);
                tmp.normalize();
                _camera.rotate(rot * sensitivity, tmp);
                rot = x - _startx;
                _camera.rotate(-rot * sensitivity, Vec(0.0, 1.0, 0.0));
                _startx = x;
                _starty = y;
            }
            glutPostRedisplay();
        }
        void reshape(int width, int height)
        {
            glViewport(0, 0, width, height);
            glMatrixMode(GL_PROJECTION);
            glLoadIdentity();
            gluPerspective(75.0, (double)width / (double)height, 0.001, 20000.0);
            glMatrixMode(GL_MODELVIEW);
        }
        void drawPointCloud()
        {
            glPointSize(_pointSize);
            glBegin(GL_POINTS);
            _cloudMx.lock();
            for (auto it = _cloud.begin(); it != _cloud.end(); ++it)
            {
                glColor4d(it->r, it->g, it->b, 0.75);
                glVertex3d(it->x, -it->y, -it->z);
            }
            _cloudMx.unlock();
            glEnd();
        }
        void drawOrigin(Vec pos)
        {
            double length = 0.1;
            glBegin(GL_LINES);
            glColor3d(1, 0, 0);
            glVertex3d(pos.x, pos.y, pos.z);
            glVertex3d(pos.x + length, pos.y, pos.z);
            glColor3d(0, 1, 0);
            glVertex3d(pos.x, pos.y, pos.z);
            glVertex3d(pos.x, pos.y + length, pos.z);
            glColor3d(0.25f, 0.25f, 1);
            glVertex3d(pos.x, pos.y, pos.z);
            glVertex3d(pos.x, pos.y, pos.z + length);
            glEnd();
        }
        void drawFOV()
        {
            double z = 0.1;
            double x = tan(_fovX / 2.0) * z;
            double y = tan(_fovY / 2.0) * z;
            glBegin(GL_LINES);
            glColor3d(0.2, 0.2, 0.2);
            glVertex3d(0.0, 0.0, 0.0);    glVertex3d(-x, -y, -z);
            glVertex3d(0.0, 0.0, 0.0);    glVertex3d(x, -y, -z);
            glVertex3d(0.0, 0.0, 0.0);    glVertex3d(-x, y, -z);
            glVertex3d(0.0, 0.0, 0.0);    glVertex3d(x, y, -z);
            glVertex3d(-x, -y, -z);        glVertex3d(x, -y, -z);
            glVertex3d(x, -y, -z);        glVertex3d(x, y, -z);
            glVertex3d(x, y, -z);        glVertex3d(-x, y, -z);
            glVertex3d(-x, y, -z);        glVertex3d(-x, -y, -z);
            glEnd();
        }
    };

    CloudViewer *CloudViewer::_this = NULL;

    typedef struct
    {
        Mat leftImg;
        Mat depth;
    }D3DFrame;

    class Dense3DFrameQueue 
    {
        std::mutex mutex;
        std::queue<D3DFrame> queue;
    public:
        void push(D3DFrame frame)
        {
            mutex.lock();
            queue.push(frame);
            mutex.unlock();
        }
        bool pop(D3DFrame &frame)
        {
            mutex.lock();
            if(queue.size() == 0)
            {
                mutex.unlock();
                return false;
            }
            frame = queue.front();
            queue.pop();
            mutex.unlock();
            return true;
        }
    };
}
#endif // SAMPLE_H

Sample.cpp


To implement the 3D point cloud viewer we go through the following steps:

  • Step 1 - Create Dense3DMT Instance
  • Step 2 - Configure Dense3D and DUO Parameters
  • Step 3 - Start Dense3DMT capturing and processing
  • Step 4 - Create CloudViewer and set camera FOV
  • Step 5 - Setup CloudViewer onIdle and onExit callbacks
  • Step 6 - Render the generated 3D points with CloudViewer (inside onIdle callback)

#include "Sample.h"
using namespace cl;

#define WIDTH    320
#define HEIGHT    240
#define FPS        30

int main(int argc, char* argv[])
{
    printf("Dense3D Point Cloud Program\n");

    Dense3DMTInstance dense3d;
    if(!Dense3DOpen(&dense3d))
    {
        printf("Could not open Dense3DMT\n");
        return 1;
    }
    
    DUOInstance duo = GetDUOInstance(dense3d);

    char tmp[260];
    GetDUODeviceName(duo,tmp);
    printf("DUO Device Name:      '%s'\n", tmp);
    GetDUOSerialNumber(duo, tmp);
    printf("DUO Serial Number:    %s\n", tmp);
    GetDUOFirmwareVersion(duo, tmp);
    printf("DUO Firmware Version: v%s\n", tmp);
    GetDUOFirmwareBuild(duo, tmp);
    printf("DUO Firmware Build:   %s\n", tmp);
    printf("DUOLib Version:       v%s\n", GetDUOLibVersion());
    printf("Dense3DMT Version:    v%s\n", Dense3DGetLibVersion());

    // Set the Dense3D license (visit https://duo3d.com/account)
    if(!SetDense3DLicense(dense3d, "XXXXX-XXXXX-XXXXX-XXXXX-XXXXX")) // <-- Put your Dense3D license
    {
        printf("Invalid or missing Dense3D license. To get your license visit https://duo3d.com/account\n");
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 1;
    }

    if(!SetDense3DImageInfo(dense3d, WIDTH, HEIGHT, FPS))
    {
        printf("SetDense3DImageInfo error\n");
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 1;
    }

    Dense3DParams params;
    params.scale = 3;
    params.mode = 0;
    params.numDisparities = 2;
    params.sadWindowSize = 6;
    params.preFilterCap = 28;
    params.uniqenessRatio = 27;
    params.speckleWindowSize = 52;
    params.speckleRange = 14;
    if(!SetDense3Params(dense3d, params))
    {
        printf("GetDense3Params error\n");
        // Close Dense3D library
        Dense3DClose(dense3d);
        return 1;
    }

    // Queue used to receive Dense3D frames
    Dense3DFrameQueue d3dq;

    if(!Dense3DStart(dense3d, [](const PDense3DFrame pFrameData, void *pUserData)
                    {
                        D3DFrame frame;
                        Size frameSize(pFrameData->duoFrame->width, pFrameData->duoFrame->height);
                        frame.leftImg = Mat(frameSize, CV_8U, pFrameData->duoFrame->leftData);
                        frame.depth = Mat(frameSize, CV_32FC3, pFrameData->depthData);
                        ((Dense3DFrameQueue*)pUserData)->push(frame);
                    }, &d3dq))
    {
        printf("Dense3DStart error\n");
        return 1;
    }

    // Set exposure, LED brightness and camera orientation
    SetDUOExposure(duo, 85);
    SetDUOLedPWM(duo, 28);
    SetDUOVFlip(duo, false);

    DUOResolutionInfo ri;
    GetDUOResolutionInfo(duo, &ri);
    double fov[4];
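    // GetDUORectifiedFOV reports the rectified field of view in degrees;
    // fov[0]/fov[1] are passed to the viewer below to draw the camera frustum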
    GetDUORectifiedFOV(duo, fov);

    CloudViewer viewer;
    viewer.setFov(fov[0], fov[1]);

    // Setup idle callback
    viewer.onIdle([&]()
    {
        D3DFrame d3DFrame;
        if(!d3dq.pop(d3DFrame))
            return;

        // Update point cloud
        viewer.addData(d3DFrame.leftImg, d3DFrame.depth);
    });
    viewer.onExit([&]()
    {
        Dense3DStop(dense3d);
        Dense3DClose(dense3d);
    });

    // Run viewer loop
    viewer.run();
    return 0;
}
