// VolumeRendering_in_Unity — Assets/5. Transfer Function/Shaders/VolumeRendering_teratail.shader
Shader "VolumeRendering/VolumeRendering_teratail"
{
	Properties
	{
		[Header(Rendering)]
		// 3D density texture sampled along the view ray (only the alpha channel is read)
		_Volume("Volume", 3D) = "" {}
		// Tint multiplied onto the accumulated opacity in the fragment shader
		_Color("Color", Color) = (1, 1, 1, 1)
		// Number of ray-march steps spanning the cube's depth
		_Iteration("Iteration", Int) = 100
		// Per-sample opacity contribution during front-to-back accumulation
		_Intensity("Intensity", Range(0.0, 1.0)) = 0.1

		[Header(Ranges)]
		// Axis-aligned sub-volume clip bounds in normalized [0,1] texture space;
		// samples outside [Min,Max] on any axis contribute nothing
		_MinX("MinX", Range(0, 1)) = 0.0
		_MaxX("MaxX", Range(0, 1)) = 1.0
		_MinY("MinY", Range(0, 1)) = 0.0
		_MaxY("MaxY", Range(0, 1)) = 1.0
		_MinZ("MinZ", Range(0, 1)) = 0.0
		_MaxZ("MaxZ", Range(0, 1)) = 1.0
	}

		CGINCLUDE
#include "UnityCG.cginc"

			// Vertex input: object-space position only.
			struct appdata
		{
			float4 vertex : POSITION;
		};

		// Vertex-to-fragment payload for the ray marcher.
		struct v2f
		{
			float4 vertex : SV_POSITION;   // clip-space position
			float3 localPos : TEXCOORD0;   // object-space position (cube spans [-0.5, 0.5])
			float3 worldPos : TEXCOORD1;   // world-space position of the rasterized surface
			float3 localViewDir : TEXCOORD2; // view ray direction in object space (unnormalized)
			float3 worldViewDir : TEXCOORD3; // view ray direction in world space (unnormalized)
			float4 screenPos : TEXCOORD4;  // screen position for depth-texture sampling
		};

		sampler3D _Volume;                            // volume density texture (alpha used)
		fixed4 _Color;                                // output tint
		int _Iteration;                               // ray-march step count
		fixed _Intensity;                             // per-sample opacity weight
		fixed _MinX, _MaxX, _MinY, _MaxY, _MinZ, _MaxZ; // sub-volume clip bounds

// Stop marching once accumulated opacity is within this distance of 1.
#define INTEGRATION_THRESHOLD 0.01
// 1/1024 — keeps the face-intersection division finite when a view-direction
// component is exactly zero.
#define DIRECTIONAL_EPSILON 0.0009765625
// Samples with alpha below this are skipped entirely.
#define ALPHA_THRESHOLD 0.01

		sampler2D _CameraDepthTexture;

		// Equivalent of transform.InverseTransformPoint for this cube:
		// converts a world-space point into the cube's object space.
		float3 worldToObjectPos(float3 worldPos)
		{
			float4 homogeneous = float4(worldPos, 1.0);
			return mul(unity_WorldToObject, homogeneous).xyz;
		}

		// Equivalent of transform.InverseTransformVector for this cube:
		// transforms a world-space direction into object space (no translation).
		float3 worldToObjectVec(float3 worldVec)
		{
			float3x3 toObject = (float3x3)unity_WorldToObject;
			return mul(toObject, worldVec);
		}

		// Equivalent of transform.TransformVector for this cube:
		// transforms an object-space direction into world space (no translation).
		float3 objectToWorldVec(float3 localVec)
		{
			float3x3 toWorld = (float3x3)unity_ObjectToWorld;
			return mul(toWorld, localVec);
		}

		// Camera's transform.position, read from the translation column of the
		// inverse view matrix.
		float3 getWorldCameraPos()
		{
			return float3(UNITY_MATRIX_I_V._14, UNITY_MATRIX_I_V._24, UNITY_MATRIX_I_V._34);
		}

		// Camera's transform.forward. The third row of the view matrix is the
		// negated forward axis, so flip its sign.
		float3 getWorldCameraDir()
		{
			float3 viewBackward = UNITY_MATRIX_V._31_32_33;
			return -viewBackward;
		}

		// Camera's nearClipPlane distance (_ProjectionParams.y holds the near plane).
		float getWorldCameraNear()
		{
			return _ProjectionParams.y;
		}

		// True when the current projection is perspective: a perspective
		// projection matrix has a nonzero bottom row, an orthographic one does not.
		bool isPerspective()
		{
			float3 bottomRow = float3(UNITY_MATRIX_P._41, UNITY_MATRIX_P._42, UNITY_MATRIX_P._43);
			return any(bottomRow);
		}

		// Ray origin for a given surface point: the camera position under
		// perspective projection; under orthographic projection, the point on the
		// plane through the camera position that faces worldPos head-on (all
		// orthographic rays are parallel, so each pixel gets its own origin).
		float3 getWorldCameraOrigin(float3 worldPos)
		{
			if (isPerspective())
			{
				return getWorldCameraPos();
			}
			float3 camDir = getWorldCameraDir();
			float alongForward = dot(getWorldCameraPos() - worldPos, camDir);
			return worldPos + alongForward * camDir;
		}

		// Cube's transform.position (translation column of the object-to-world matrix).
		float3 getWorldBoundsCenter()
		{
			return float3(unity_ObjectToWorld._14, unity_ObjectToWorld._24, unity_ObjectToWorld._34);
		}

		// Thickness of the cube when sandwiched between two planes perpendicular
		// to the camera direction: twice the largest projection of a half-diagonal
		// onto that direction. Only four corners are tested — the remaining four
		// are their point-symmetric mirrors and project to the same magnitudes.
		float getWorldBoundsDepth(float3 worldCameraDir)
		{
			float halfDepth = abs(dot(objectToWorldVec(float3(0.5, 0.5, 0.5)), worldCameraDir));
			halfDepth = max(halfDepth, abs(dot(objectToWorldVec(float3(-0.5, 0.5, 0.5)), worldCameraDir)));
			halfDepth = max(halfDepth, abs(dot(objectToWorldVec(float3(0.5, -0.5, 0.5)), worldCameraDir)));
			halfDepth = max(halfDepth, abs(dot(objectToWorldVec(float3(-0.5, -0.5, 0.5)), worldCameraDir)));
			return halfDepth * 2.0;
		}

		// Positions (along the camera forward axis) of the two planes sandwiching
		// the cube, given the sandwich thickness computed above.
		float2 getWorldBoundsNearFar(float worldBoundsDepth)
		{
			float center;
			if (isPerspective())
			{
				center = distance(getWorldBoundsCenter(), getWorldCameraPos());
			}
			else
			{
				center = dot(getWorldBoundsCenter() - getWorldCameraPos(), getWorldCameraDir());
			}
			float halfDepth = 0.5 * worldBoundsDepth;
			return float2(center - halfDepth, center + halfDepth);
		}

		// Performs Plane.Raycast against the cube's faces for all three axes at
		// once, returning the signed ray parameter per axis. sign(localViewDir)
		// picks the face the ray travels toward on each axis; DIRECTIONAL_EPSILON
		// keeps the division finite when a component of localViewDir is exactly
		// zero (sign() == 0 on that axis).
		float3 getLocalBoundsFaces(float3 localPos, float3 localViewDir, float faceOffset)
		{
			float3 signs = sign(localViewDir);
			return -(signs * localPos + faceOffset) / (abs(localViewDir) + (1.0 - abs(signs)) * DIRECTIONAL_EPSILON);
		}

		// Distance along the ray from localPos to the cube's front (entry) face —
		// the slab-method entry parameter — clamped to zero when the origin is
		// already inside the cube.
		float getLocalBoundsFrontFace(float3 localPos, float3 localViewDir)
		{
			float3 faceParams = getLocalBoundsFaces(localPos, localViewDir, 0.5);
			float entry = max(faceParams.x, max(faceParams.y, faceParams.z));
			return max(entry, 0.0);
		}

		// Reconstructs the eye-space Z of the opaque geometry already rendered at
		// this pixel from the camera depth texture, so that opaque objects
		// penetrating the cube correctly terminate the ray march.
		// Perspective: LinearEyeDepth converts the raw (nonlinear) depth.
		// Orthographic: depth is already linear in NDC, so invert it via the third
		// row of the inverse projection matrix; _ProjectionParams.x applies the
		// projection's flip sign. (NOTE(review): ortho branch assumes the depth
		// buffer stores [0,1] mapped from NDC [-1,1] — confirm per platform.)
		float sampleOpaqueZ(float4 screenPos)
		{
			float rawDepth = SAMPLE_DEPTH_TEXTURE_PROJ(_CameraDepthTexture, UNITY_PROJ_COORD(screenPos));
			return isPerspective()
				? LinearEyeDepth(rawDepth)
				: -dot(unity_CameraInvProjection._33_34, float2(_ProjectionParams.x * (rawDepth * 2.0 - 1.0), 1.0));
		}

		// Samples the volume's alpha at pos (normalized texture coordinates),
		// masked to zero outside the user-defined [Min, Max] box on every axis.
		fixed sample(float3 pos)
		{
			fixed3 aboveMin = step(fixed3(_MinX, _MinY, _MinZ), pos);
			fixed3 belowMax = step(pos, fixed3(_MaxX, _MaxY, _MaxZ));
			fixed3 inside = aboveMin * belowMax;
			return tex3D(_Volume, pos).a * inside.x * inside.y * inside.z;
		}

		// Vertex shader: emits everything the ray marcher needs — clip position,
		// object/world positions, the view ray in both spaces, and the screen
		// position for depth-texture sampling. For orthographic cameras the view
		// "direction" is the camera forward axis, since all rays are parallel.
		v2f vert(appdata v)
		{
			v2f o;
			o.vertex = UnityObjectToClipPos(v.vertex);
			// .xyz makes the float4 -> float3 narrowing explicit (was an implicit
			// truncation, which compilers flag as a warning).
			o.worldPos = mul(unity_ObjectToWorld, v.vertex).xyz;
			o.localPos = v.vertex.xyz;
			o.worldViewDir = isPerspective() ? -UnityWorldSpaceViewDir(o.worldPos) : getWorldCameraDir();
			o.localViewDir = worldToObjectVec(o.worldViewDir);
			o.screenPos = ComputeScreenPos(o.vertex);
			return o;
		}

		// Fragment shader: ray-marches the volume front to back between the
		// camera near plane (or cube entry face) and the cube's back face (or the
		// nearest opaque surface), accumulating opacity. Sample indices are taken
		// on a grid anchored to the bounds' near plane, so samples of every pixel
		// lie on shared planes perpendicular to the camera forward axis.
		fixed4 frag(v2f i) : SV_Target
		{
			// Find the marchable range: from the camera near plane to the nearest
			// opaque surface, intersected with the cube.
			float3 worldViewDir = normalize(i.worldViewDir);
			float3 localViewDir = normalize(i.localViewDir);
			// Converts distances measured along the camera forward axis into
			// distances along this pixel's (oblique) view ray.
			float peripheralFactor = 1.0 / dot(getWorldCameraDir(), worldViewDir);
			float3 worldCameraOrigin = getWorldCameraOrigin(i.worldPos);
			float3 localCameraOrigin = worldToObjectPos(worldCameraOrigin);
			float worldFrontFace = dot(objectToWorldVec(getLocalBoundsFrontFace(localCameraOrigin, localViewDir) * localViewDir), worldViewDir);
			float worldBackFace = dot(i.worldPos - worldCameraOrigin, worldViewDir);
			float worldCameraNear = getWorldCameraNear();
			float worldOpaque = sampleOpaqueZ(i.screenPos) * peripheralFactor;
			float worldEnter = max(worldFrontFace, worldCameraNear);
			float worldExit = min(worldBackFace, worldOpaque);

			// Discard pixels whose marchable range is empty.
			clip(worldExit - worldEnter);

			// Convert the [enter, exit] range into start/end step indices on the
			// fixed sampling grid along the view ray.
			float worldBoundsDepth = getWorldBoundsDepth(getWorldCameraDir());
			// BUGFIX: this was declared float2, splatting the scalar .x into both
			// components and silently truncating back to float below; a scalar is
			// what is intended.
			float worldBoundsNear = getWorldBoundsNearFar(worldBoundsDepth).x;
			float worldLineOrigin = worldBoundsNear * peripheralFactor;
			float worldLineLength = worldBoundsDepth * peripheralFactor;
			// max(_Iteration, 1) guards against a division by zero when the
			// material's Iteration property is set to 0.
			float worldStepLength = worldLineLength / max(_Iteration, 1);
			float worldLineOriginToEnter = worldEnter - worldLineOrigin;
			float worldLineOriginToExit = worldExit - worldLineOrigin;
			int startingIndex = (int)ceil(worldLineOriginToEnter / worldStepLength);
			int terminalIndex = (int)(worldLineOriginToExit / worldStepLength);

			float3 localStep = worldToObjectVec(worldStepLength * worldViewDir);
			float3 localLineOriginPos = worldToObjectPos(worldCameraOrigin + worldViewDir * worldLineOrigin);
			fixed output = 0.0;

			// Front-to-back compositing of volume samples.
			[loop]
			for (int j = startingIndex; j <= terminalIndex; j++)
			{
				float3 lpos = localLineOriginPos + j * localStep;
				// +0.5 maps object-space [-0.5, 0.5] to texture space [0, 1].
				fixed a = sample(lpos + 0.5);
				if (a < ALPHA_THRESHOLD)
				{
					continue;
				}
				output += (1.0 - output) * a * _Intensity;
				// Early out once the pixel is effectively opaque.
				if (output > 1.0 - INTEGRATION_THRESHOLD)
				{
					break;
				}
			}

			// Premultiplied-style output, composited with Blend One OneMinusSrcAlpha.
			return _Color * output;
		}
			ENDCG

			// Transparent back-face pass: Cull Front keeps the cube visible even
			// when the camera is inside it; ZWrite Off + ZTest Always defer all
			// occlusion to the depth-texture test performed in frag; blending is
			// One OneMinusSrcAlpha to match the premultiplied output of frag.
			SubShader
		{
			Tags
			{
				"Queue" = "Transparent"
				"RenderType" = "Transparent"
			}

				Pass
			{
				Cull Front
				ZWrite Off
				ZTest Always
				Blend One OneMinusSrcAlpha
				Lighting Off

				CGPROGRAM
				#pragma vertex vert
				#pragma fragment frag
				ENDCG
			}
		}
}