13
10

Delete article

Deleted articles cannot be recovered.

Any draft of this article would also be deleted.

Are you sure you want to delete this article?

More than 5 years have passed since last update.

KinectのDepthをUnityのシェーダーでPointCloud的に描画する

Posted at

#背景
Kinectを使ったインタラクションゲームを作る際にDepthがUnityの空間内にどう分布しているかを見れるといいなと思ってUnity内にDepthをポイントクラウド的に描画できる機能を作りました。

#深度データからUnityの3次元座標系への変換

深度データの配列からCameraSpacePoint配列に変換するのが以下のメソッド。これでDepth配列を変換するとCameraSpace空間上の座標の配列が得られる。

KinectSensor.CoordinateMapper.MapDepthFrameToCameraSpace (ushort[] depths, CameraSpacePoint[] cameraSpacePoints);

CameraSpace空間はKinectを原点とし、Kinectから見て左が x 軸正、上が y 軸正、前方が z 軸正の右手系なので、Unityとはx軸の方向が異なるためX座標に-1をかけて変換する必要がある。

Vector3 unitypoint = new Vector3(-cameraSpace.X, cameraSpace.Y, cameraSpace.Z);

#描画する為のソースコード

得られたUnity座標をUnity内に描画する為のスクリプトを書きます。描画する方法は幾つかありますが、この記事では頂点シェーダーにポイントクラウドの座標を渡してジオメトリシェーダーでビルボード描画する手法をとります。やり方は下記の方法を参考にというか、ほぼそのまま採用しています。

[Unity]コンピュートシェーダ(GPGPU)で1万個のパーティクルを動かす

下記がKinectからデプス情報を取得してCameraSpace空間に変換しシェーダーに渡すC#側のソースです。C#側からshader側にCameraSpace空間座標の配列をComputeBuffer経由で渡しています。

DepthViewer.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System.Runtime.InteropServices;
using System;
using Windows.Kinect;


/// <summary>
/// Depth座標
/// </summary>
/// <summary>
/// Plain 3-float value type. Its unmanaged layout (3 × 4 bytes) is used to
/// size the ComputeBuffer that feeds the point-cloud shader, matching the
/// shader-side Float3 struct.
/// </summary>
public struct Float3
{
    public float x;
    public float y;
    public float z;

    /// <summary>Copies the components of the given Vector3.</summary>
    public Float3(Vector3 vec)
    {
        x = vec.x;
        y = vec.y;
        z = vec.z;
    }
}


/// <summary>
/// GPUDepth
/// </summary>
/// <summary>
/// Streams Kinect depth frames into a ComputeBuffer (~30 Hz) and draws them
/// as a point cloud via a geometry-shader billboard material in OnRenderObject.
/// </summary>
public class DepthViewer : MonoBehaviour
{
    /// <summary>Whether the point cloud is drawn; set false to hide it.</summary>
    public bool isDisp
    {
        get;
        set;
    }


    // Shader used to build the point-cloud material at runtime.
    [SerializeField] Shader _depthRenderShader;

    // Sprite texture sampled per billboard quad.
    [SerializeField] Texture _depthPointTexture;

    private KinectSensor _Sensor;

    private CoordinateMapper _Mapper;

    private DepthFrameReader _DepthReader;

    // GPU buffer the shader reads point positions from ("Points").
    private ComputeBuffer _depthPointBuffer;

    private Material _depthPointMaterial;

    // Raw depth values copied out of each DepthFrame. Stays null when no
    // sensor is available.
    private ushort[] _depths;

    // Depth mapped into Kinect CameraSpace coordinates.
    private CameraSpacePoint[] _cameraSpacePoints;

    private int _width, _height;


    /// <summary>
    /// Initializes the sensor, then polls depth frames forever at ~30 Hz,
    /// mapping them to CameraSpace and uploading them to the GPU buffer.
    /// </summary>
    IEnumerator Start()
    {
        _depthPointMaterial = new Material(_depthRenderShader);

        // Kinect initialization. GetDefault() may return null when no sensor
        // is attached; everything below is guarded against that case.
        _Sensor = KinectSensor.GetDefault();

        if (_Sensor != null)
        {
            _Mapper = _Sensor.CoordinateMapper;
            // NOTE: if the sensor is already open, no reader/arrays are
            // created and this component stays idle (original behavior).
            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
                _DepthReader = _Sensor.DepthFrameSource.OpenReader();
                var frameDesc = _Sensor.DepthFrameSource.FrameDescription;
                _depths = new ushort[frameDesc.Width * frameDesc.Height];
                _cameraSpacePoints = new CameraSpacePoint[frameDesc.Width * frameDesc.Height];
                InitDispDepth(frameDesc.Width, frameDesc.Height);
            }
        }

        var wait = new WaitForSeconds(1f / 30f);
        while (true)
        {
            // Grab the most recent depth frame, if one arrived this tick.
            if (_DepthReader != null)
            {
                var frame = _DepthReader.AcquireLatestFrame();
                if (frame != null)
                {
                    frame.CopyFrameDataToArray(_depths);

                    frame.Dispose();
                    frame = null;
                }
            }

            // BUGFIX: _depths is null when no sensor was found (or it was
            // already open); the unguarded .Length dereference previously
            // threw NullReferenceException every tick in that case.
            if (_depths != null && _depths.Length > 0 && _depthPointBuffer != null)
            {
                // Convert raw depth to CameraSpace coordinates.
                _Mapper.MapDepthFrameToCameraSpace(_depths, _cameraSpacePoints);

                // Upload to the ComputeBuffer consumed by the shader.
                _depthPointBuffer.SetData(_cameraSpacePoints);
            }

            yield return wait;
        }
    }


    /// <summary>Releases the GPU buffer, frame reader, sensor and material.</summary>
    void OnDestroy()
    {
        if (_depthPointBuffer != null)
        {
            _depthPointBuffer.Release();
            _depthPointBuffer = null;
        }

        if (_DepthReader != null)
        {
            _DepthReader.Dispose();
            _DepthReader = null;
        }

        if (_Sensor != null)
        {
            if (_Sensor.IsOpen)
            {
                _Sensor.Close();
            }

            _Sensor = null;
        }

        // BUGFIX: the Material instantiated in Start() was never destroyed,
        // leaking one material per add/remove cycle of this component.
        if (_depthPointMaterial != null)
        {
            Destroy(_depthPointMaterial);
            _depthPointMaterial = null;
        }
    }


    /// <summary>
    /// Allocates the point buffer for a width*height depth frame and enables
    /// rendering.
    /// </summary>
    /// <param name="width">Depth frame width in pixels.</param>
    /// <param name="height">Depth frame height in pixels.</param>
    void InitDispDepth(int width, int height)
    {
        isDisp = true;
        _width = width;
        _height = height;
        // One Float3 (12 bytes) per depth pixel.
        _depthPointBuffer = new ComputeBuffer(width * height, Marshal.SizeOf(typeof(Float3)));
    }


    /// <summary>Draws the point cloud procedurally (one vertex per depth pixel).</summary>
    void OnRenderObject()
    {
        // BUGFIX: also bail out when initialization never ran (no sensor),
        // in which case the material/buffer would be null here.
        if (!isDisp || _depthPointMaterial == null || _depthPointBuffer == null)
        {
            return;
        }

        // Bind texture + point buffer and issue the procedural draw.
        _depthPointMaterial.SetTexture("_MainTex", _depthPointTexture);
        _depthPointMaterial.SetBuffer("Points", _depthPointBuffer);
        _depthPointMaterial.SetPass(0);
        Graphics.DrawProcedural(MeshTopology.Points, _depthPointBuffer.count);
    }

}

下記がC#側からComputeBuffer経由で渡されたCameraSpace座標をUnity座標系に変換してビルボード表示するshaderです。

DepthViewShader.shader
  Shader "Custom/DepthViewShader" {
    SubShader {

        ZWrite On
        Blend SrcAlpha OneMinusSrcAlpha

        Pass {
            CGPROGRAM

            // Geometry shaders require shader model 5.0.
            #pragma target 5.0

            #pragma vertex vert
            #pragma geometry geom
            #pragma fragment frag

            #include "UnityCG.cginc"


            sampler2D _MainTex;

            // Mirrors the C#-side layout of one CameraSpace point.
            struct Float3
            {
                float X;
                float Y;
                float Z;
            };


            // Depth points uploaded from C# via ComputeBuffer.SetData.
            StructuredBuffer<Float3> Points;


            struct VSOut {
                float4 pos : SV_POSITION;
                float2 tex : TEXCOORD0;
                float4 col : COLOR;
            };

            // Hue in [0,1] -> saturated RGB.
            float3 HUEtoRGB(in float  H)
            {
                float3 rgb = float3(abs(H * 6 - 3) - 1,
                                    2 - abs(H * 6 - 2),
                                    2 - abs(H * 6 - 4));
                return saturate(rgb);
            }

            float3 HSVtoRGB(in float3 HSV)
            {
                float3 pure = HUEtoRGB(HSV.x);
                return ((pure - 1) * HSV.y + 1) * HSV.z;
            }


            // Fetches one point by vertex id. X is negated to convert the
            // right-handed Kinect CameraSpace into Unity's coordinate system;
            // the point is tinted by (the fractional part of) its distance
            // from the origin.
            VSOut vert (uint id : SV_VertexID)
            {
                VSOut o;
                o.pos = float4(-Points[id].X, Points[id].Y, Points[id].Z , 1);
                o.tex = float2(0, 0);

                float dist = length(o.pos);
                float3 hsv = float3(dist % 1.0, dist % 1.0, 1);
                o.col = float4(HSVtoRGB(hsv).xyz, 1);
                return o;
            }

            // Expands each point into a 4-vertex camera-facing quad (billboard).
            [maxvertexcount(4)]
            void geom (point VSOut input[1], inout TriangleStream<VSOut> outStream)
            {
                float4 center = input[0].pos;
                float4 tint   = input[0].col;

                for (int u = 0; u < 2; u++)
                {
                    for (int v = 0; v < 2; v++)
                    {
                        // View matrix with its translation column zeroed, so
                        // the row-vector multiply below applies only the
                        // camera-facing rotation to the quad corner offset.
                        float4x4 billboardMatrix = UNITY_MATRIX_V;
                        billboardMatrix._m03 =
                        billboardMatrix._m13 =
                        billboardMatrix._m23 =
                        billboardMatrix._m33 = 0;

                        VSOut o;
                        float2 corner = float2(u, v);
                        o.tex = corner;

                        // Offset the corner by +/-0.005 around the center,
                        // then project into clip space.
                        o.pos = center + mul(float4((corner * 2 - float2(1, 1)) * 0.005, 0, 1), billboardMatrix);
                        o.pos = mul (UNITY_MATRIX_VP, o.pos);

                        o.col = tint;
                        outStream.Append (o);
                    }
                }

                outStream.RestartStrip();
            }

            // Samples the point sprite texture, modulates by the per-point
            // color, and alpha-tests away near-transparent texels.
            fixed4 frag (VSOut i) : COLOR
            {
                float4 col = tex2D(_MainTex, i.tex) * i.col;

                if(col.a < 0.3) discard;

                return col;
            }

            ENDCG
        }
    }
 }

描画結果

GameView・SceneView内でDepthとGameObjectとの位置関係が一目でわかるので、デバッグにとても便利です。

13
10
1

Register as a new user and use Qiita more conveniently

  1. You get articles that match your needs
  2. You can efficiently read back useful information
  3. You can use dark theme
What you can do with signing up
13
10

Delete article

Deleted articles cannot be recovered.

Any draft of this article would also be deleted.

Are you sure you want to delete this article?