浏览代码

Merge pull request #14847 from almarklein/volumeexample

Volume rendering example using 3D texture
Mr.doob 6 年之前
父节点
当前提交
c98820c985

+ 1 - 0
examples/files.js

@@ -173,6 +173,7 @@ var files = {
 		"webgl_materials_texture_manualmipmap",
 		"webgl_materials_texture_partialupdate",
 		"webgl_materials_texture_rotation",
+		"webgl_materials_texture3d_volume1",
 		"webgl_materials_translucency",
 		"webgl_materials_transparency",
 		"webgl_materials_variations_basic",

+ 324 - 0
examples/js/shaders/VolumeShader.js

@@ -0,0 +1,324 @@
+/**
+ * @author Almar Klein / http://almarklein.org
+ *
+ * Shaders to render 3D volumes using raycasting.
+ * The applied techniques are based on similar implementations in the Visvis and Vispy projects.
+ * This is not the only possible approach; hence the "1" suffix in the shader's name.
+ */
+
+THREE.VolumeRenderShader1 = {
+	uniforms: {
+        "u_size": { value: [1, 1, 1] },
+        "u_renderstyle": { value: 0 },
+        "u_renderthreshold": { value: 0.5 },
+        "u_clim": { value: [0.0, 1.0] },
+        "u_data": { value: null },
+        "u_cmdata": { value: null }
+    },
+    vertexShader: [
+        'varying vec4 v_nearpos;',
+        'varying vec4 v_farpos;',
+        'varying vec3 v_position;',
+
+        'mat4 inversemat(mat4 m) {',
+            // Taken from https://github.com/stackgl/glsl-inverse/blob/master/index.glsl
+            // This function is licensed under the MIT license by Mikola Lysenko
+            'float',
+            'a00 = m[0][0], a01 = m[0][1], a02 = m[0][2], a03 = m[0][3],',
+            'a10 = m[1][0], a11 = m[1][1], a12 = m[1][2], a13 = m[1][3],',
+            'a20 = m[2][0], a21 = m[2][1], a22 = m[2][2], a23 = m[2][3],',
+            'a30 = m[3][0], a31 = m[3][1], a32 = m[3][2], a33 = m[3][3],',
+
+            'b00 = a00 * a11 - a01 * a10,',
+            'b01 = a00 * a12 - a02 * a10,',
+            'b02 = a00 * a13 - a03 * a10,',
+            'b03 = a01 * a12 - a02 * a11,',
+            'b04 = a01 * a13 - a03 * a11,',
+            'b05 = a02 * a13 - a03 * a12,',
+            'b06 = a20 * a31 - a21 * a30,',
+            'b07 = a20 * a32 - a22 * a30,',
+            'b08 = a20 * a33 - a23 * a30,',
+            'b09 = a21 * a32 - a22 * a31,',
+            'b10 = a21 * a33 - a23 * a31,',
+            'b11 = a22 * a33 - a23 * a32,',
+
+            'det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06;',
+
+        'return mat4(',
+            'a11 * b11 - a12 * b10 + a13 * b09,',
+            'a02 * b10 - a01 * b11 - a03 * b09,',
+            'a31 * b05 - a32 * b04 + a33 * b03,',
+            'a22 * b04 - a21 * b05 - a23 * b03,',
+            'a12 * b08 - a10 * b11 - a13 * b07,',
+            'a00 * b11 - a02 * b08 + a03 * b07,',
+            'a32 * b02 - a30 * b05 - a33 * b01,',
+            'a20 * b05 - a22 * b02 + a23 * b01,',
+            'a10 * b10 - a11 * b08 + a13 * b06,',
+            'a01 * b08 - a00 * b10 - a03 * b06,',
+            'a30 * b04 - a31 * b02 + a33 * b00,',
+            'a21 * b02 - a20 * b04 - a23 * b00,',
+            'a11 * b07 - a10 * b09 - a12 * b06,',
+            'a00 * b09 - a01 * b07 + a02 * b06,',
+            'a31 * b01 - a30 * b03 - a32 * b00,',
+            'a20 * b03 - a21 * b01 + a22 * b00) / det;',
+        '}',
+
+
+        'void main() {',
+            // Prepare transforms to map to "camera view". See also:
+            // https://threejs.org/docs/#api/renderers/webgl/WebGLProgram
+            'mat4 viewtransformf = viewMatrix;',
+            'mat4 viewtransformi = inversemat(viewMatrix);',
+
+            // Project local vertex coordinate to camera position. Then do a step
+            // backward (in cam coords) to the near clipping plane, and project back. Do
+            // the same for the far clipping plane. This gives us all the information we
+            // need to calculate the ray and truncate it to the viewing cone.
+            'vec4 position4 = vec4(position, 1.0);',
+            'vec4 pos_in_cam = viewtransformf * position4;',
+
+            // Intersection of ray and near clipping plane (z = -1 in clip coords)
+            'pos_in_cam.z = -pos_in_cam.w;',
+            'v_nearpos = viewtransformi * pos_in_cam;',
+
+            // Intersection of ray and far clipping plane (z = +1 in clip coords)
+            'pos_in_cam.z = pos_in_cam.w;',
+            'v_farpos = viewtransformi * pos_in_cam;',
+
+            // Set varyings and output pos
+            'v_position = position;',
+            'gl_Position = projectionMatrix * viewMatrix * modelMatrix * position4;',
+        '}',
+    ].join( '\n' ),
+	fragmentShader: [
+        'precision highp float;',
+        'precision mediump sampler3D;',
+
+        'uniform vec3 u_size;',
+        'uniform int u_renderstyle;',
+        'uniform float u_renderthreshold;',
+        'uniform vec2 u_clim;',
+
+        'uniform sampler3D u_data;',
+        'uniform sampler2D u_cmdata;',
+
+        'varying vec3 v_position;',
+        'varying vec4 v_nearpos;',
+        'varying vec4 v_farpos;',
+
+        // The maximum distance through our rendering volume is sqrt(3).
+        'const int MAX_STEPS = 887;  // 887 for 512^3, 1774 for 1024^3',
+        'const int REFINEMENT_STEPS = 4;',
+        'const float relative_step_size = 1.0;',
+        'const vec4 ambient_color = vec4(0.2, 0.4, 0.2, 1.0);',
+        'const vec4 diffuse_color = vec4(0.8, 0.2, 0.2, 1.0);',
+        'const vec4 specular_color = vec4(1.0, 1.0, 1.0, 1.0);',
+        'const float shininess = 40.0;',
+
+        'void cast_mip(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray);',
+        'void cast_iso(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray);',
+
+        'float sample1(vec3 texcoords);',
+        'vec4 apply_colormap(float val);',
+        'vec4 add_lighting(float val, vec3 loc, vec3 step, vec3 view_ray);',
+
+
+        'void main() {',
+            // Normalize clipping plane info
+            'vec3 farpos = v_farpos.xyz / v_farpos.w;',
+            'vec3 nearpos = v_nearpos.xyz / v_nearpos.w;',
+
+            // Calculate unit vector pointing in the view direction through this fragment.
+            'vec3 view_ray = normalize(nearpos.xyz - farpos.xyz);',
+
+            // Compute the (negative) distance to the front surface or near clipping plane.
+            // v_position is the back face of the cuboid, so the initial distance calculated in the dot
+            // product below is the distance from near clip plane to the back of the cuboid
+            'float distance = dot(nearpos - v_position, view_ray);',
+            'distance = max(distance, min((-0.5 - v_position.x) / view_ray.x,',
+                                        '(u_size.x - 0.5 - v_position.x) / view_ray.x));',
+            'distance = max(distance, min((-0.5 - v_position.y) / view_ray.y,',
+                                        '(u_size.y - 0.5 - v_position.y) / view_ray.y));',
+            'distance = max(distance, min((-0.5 - v_position.z) / view_ray.z,',
+                                        '(u_size.z - 0.5 - v_position.z) / view_ray.z));',
+
+                                        // Now we have the starting position on the front surface
+            'vec3 front = v_position + view_ray * distance;',
+
+            // Decide how many steps to take
+            'int nsteps = int(-distance / relative_step_size + 0.5);',
+            'if ( nsteps < 1 )',
+                'discard;',
+
+            // Get starting location and step vector in texture coordinates
+            'vec3 step = ((v_position - front) / u_size) / float(nsteps);',
+            'vec3 start_loc = front / u_size;',
+
+            // For testing: show the number of steps. This helps to establish
+            // whether the rays are correctly oriented
+            //'gl_FragColor = vec4(0.0, float(nsteps) / 1.0 / u_size.x, 1.0, 1.0);',
+            //'return;',
+
+            'if (u_renderstyle == 0)',
+                'cast_mip(start_loc, step, nsteps, view_ray);',
+            'else if (u_renderstyle == 1)',
+                'cast_iso(start_loc, step, nsteps, view_ray);',
+
+            'if (gl_FragColor.a < 0.05)',
+                'discard;',
+        '}',
+
+
+        'float sample1(vec3 texcoords) {',
+            '/* Sample float value from a 3D texture. Assumes intensity data. */',
+            'return texture(u_data, texcoords.xyz).r;',
+        '}',
+
+
+        'vec4 apply_colormap(float val) {',
+            'val = (val - u_clim[0]) / (u_clim[1] - u_clim[0]);',
+            'return texture2D(u_cmdata, vec2(val, 0.5));',
+        '}',
+
+
+        'void cast_mip(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray) {',
+
+            'float max_val = -1e6;',
+            'int max_i = 100;',
+            'vec3 loc = start_loc;',
+
+            // Enter the raycasting loop. In WebGL 1 the loop index cannot be compared with
+            // non-constant expression. So we use a hard-coded max, and an additional condition
+            // inside the loop.
+            'for (int iter=0; iter<MAX_STEPS; iter++) {',
+                'if (iter >= nsteps)',
+                    'break;',
+                // Sample from the 3D texture
+                'float val = sample1(loc);',
+                // Apply MIP operation
+                'if (val > max_val) {',
+                    'max_val = val;',
+                    'max_i = iter;',
+                '}',
+                // Advance location deeper into the volume
+                'loc += step;',
+            '}',
+
+            // Refine location, gives crispier images
+            'vec3 iloc = start_loc + step * (float(max_i) - 0.5);',
+            'vec3 istep = step / float(REFINEMENT_STEPS);',
+            'for (int i=0; i<REFINEMENT_STEPS; i++) {',
+                'max_val = max(max_val, sample1(iloc));',
+                'iloc += istep;',
+            '}',
+
+            // Resolve final color
+            'gl_FragColor = apply_colormap(max_val);',
+        '}',
+
+
+        'void cast_iso(vec3 start_loc, vec3 step, int nsteps, vec3 view_ray) {',
+
+            'gl_FragColor = vec4(0.0);  // init transparent',
+            'vec4 color3 = vec4(0.0);  // final color',
+            'vec3 dstep = 1.5 / u_size;  // step to sample derivative',
+            'vec3 loc = start_loc;',
+
+            'float low_threshold = u_renderthreshold - 0.02 * (u_clim[1] - u_clim[0]);',
+
+            // Enter the raycasting loop. In WebGL 1 the loop index cannot be compared with
+            // non-constant expression. So we use a hard-coded max, and an additional condition
+            // inside the loop.
+            'for (int iter=0; iter<MAX_STEPS; iter++) {',
+                'if (iter >= nsteps)',
+                    'break;',
+
+                    // Sample from the 3D texture
+                'float val = sample1(loc);',
+
+                'if (val > low_threshold) {',
+                // Take the last interval in smaller steps
+                    'vec3 iloc = loc - 0.5 * step;',
+                    'vec3 istep = step / float(REFINEMENT_STEPS);',
+                    'for (int i=0; i<REFINEMENT_STEPS; i++) {',
+                        'val = sample1(iloc);',
+                        'if (val > u_renderthreshold) {',
+                            'gl_FragColor = add_lighting(val, iloc, dstep, view_ray);',
+                            'return;',
+                        '}',
+                        'iloc += istep;',
+                    '}',
+                '}',
+
+                // Advance location deeper into the volume
+                'loc += step;',
+            '}',
+        '}',
+
+
+        'vec4 add_lighting(float val, vec3 loc, vec3 step, vec3 view_ray)',
+        '{',
+            // Calculate color by incorporating lighting
+
+            // View direction
+            'vec3 V = normalize(view_ray);',
+
+            // calculate normal vector from gradient
+            'vec3 N;',
+            'float val1, val2;',
+            'val1 = sample1(loc + vec3(-step[0], 0.0, 0.0));',
+            'val2 = sample1(loc + vec3(+step[0], 0.0, 0.0));',
+            'N[0] = val1 - val2;',
+            'val = max(max(val1, val2), val);',
+            'val1 = sample1(loc + vec3(0.0, -step[1], 0.0));',
+            'val2 = sample1(loc + vec3(0.0, +step[1], 0.0));',
+            'N[1] = val1 - val2;',
+            'val = max(max(val1, val2), val);',
+            'val1 = sample1(loc + vec3(0.0, 0.0, -step[2]));',
+            'val2 = sample1(loc + vec3(0.0, 0.0, +step[2]));',
+            'N[2] = val1 - val2;',
+            'val = max(max(val1, val2), val);',
+
+            'float gm = length(N); // gradient magnitude',
+            'N = normalize(N);',
+
+            // Flip normal so it points towards viewer
+            'float Nselect = float(dot(N, V) > 0.0);',
+            'N = (2.0 * Nselect - 1.0) * N;  // ==  Nselect * N - (1.0-Nselect)*N;',
+
+            // Init colors
+            'vec4 ambient_color = vec4(0.0, 0.0, 0.0, 0.0);',
+            'vec4 diffuse_color = vec4(0.0, 0.0, 0.0, 0.0);',
+            'vec4 specular_color = vec4(0.0, 0.0, 0.0, 0.0);',
+
+            // note: could allow multiple lights
+            'for (int i=0; i<1; i++)',
+            '{',
+                 // Get light direction (make sure to prevent zero division)
+                'vec3 L = normalize(view_ray);  //lightDirs[i];',
+                'float lightEnabled = float( length(L) > 0.0 );',
+                'L = normalize(L + (1.0 - lightEnabled));',
+
+                // Calculate lighting properties
+                'float lambertTerm = clamp(dot(N, L), 0.0, 1.0);',
+                'vec3 H = normalize(L+V); // Halfway vector',
+                'float specularTerm = pow(max(dot(H, N), 0.0), shininess);',
+
+                // Calculate mask
+                'float mask1 = lightEnabled;',
+
+                // Calculate colors
+                'ambient_color +=  mask1 * ambient_color;  // * gl_LightSource[i].ambient;',
+                'diffuse_color +=  mask1 * lambertTerm;',
+                'specular_color += mask1 * specularTerm * specular_color;',
+            '}',
+
+            // Calculate final color by combining the different components
+            'vec4 final_color;',
+            'vec4 color = apply_colormap(val);',
+            'final_color = color * (ambient_color + diffuse_color) + specular_color;',
+            'final_color.a = color.a;',
+            'return final_color;',
+        '}',
+	].join( '\n' )
+};

+ 14 - 0
examples/models/nrrd/README.txt

@@ -0,0 +1,14 @@
+# Source and license of the files in this directory
+
+
+## stent.nrrd
+
+A 3D volume converted from stent.npz from the imageio project.
+https://imageio.readthedocs.io/en/latest/standardimages.html
+
+It is in the public domain.
+
+
+## l.nrrd
+
+Unknown

二进制
examples/models/nrrd/stent.nrrd


二进制
examples/textures/cm_gray.png


二进制
examples/textures/cm_viridis.png


+ 216 - 0
examples/webgl_materials_texture3d_volume.html

@@ -0,0 +1,216 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+	<title>three.js webgl - volume rendering example</title>
+	<meta charset="utf-8">
+	<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
+	<style>
+		body {
+			font-family: Monospace;
+			background-color: #000;
+			color: #fff;
+			margin: 0px;
+			overflow: hidden;
+		}
+		#info {
+			color: #fff;
+			position: absolute;
+			top: 10px;
+			width: 100%;
+			text-align: center;
+			z-index: 5;
+			display:block;
+		}
+		.dg {
+			z-index: 10 !important;
+		}
+		#info a, .button { color: #f00; font-weight: bold; text-decoration: underline; cursor: pointer }
+		#inset  {
+			width: 150px;
+			height: 150px;
+			background-color: transparent; /* or transparent; will show through only if renderer alpha: true */
+			border: none; /* or none; */
+			margin: 0;
+			padding: 0px;
+			position: absolute;
+			left: 20px;
+			bottom: 20px;
+			z-index: 100;
+		}
+	</style>
+</head>
+
+<body>
+	<div id="info">
+		<a href="http://threejs.org" target="_blank" rel="noopener">three.js</a> -
+		Float volume render test (mip / isosurface)
+	</div>
+	<div id="inset"></div>
+
+	<script src="../build/three.js"></script>
+
+	<script src="js/controls/OrbitControls.js"></script>
+
+	<script src="js/Volume.js"></script>
+	<script src="js/loaders/NRRDLoader.js"></script>
+	<script src="js/shaders/VolumeShader.js"></script>
+
+	<script src="js/Detector.js"></script>
+	<script src="js/libs/gunzip.min.js"></script>
+	<script src="js/libs/dat.gui.min.js"></script>
+
+	<script>
+
+		if ( ! Detector.webgl ) { Detector.addGetWebGLMessage(); }
+
+		var container,
+			renderer,
+			scene,
+			camera,
+			controls,
+			volmaterial,
+			volconfig,
+			cmtextures,
+			gui,
+			drawrequested;
+
+		init();
+
+		function init() {
+
+			scene = new THREE.Scene();
+
+			// Create renderer
+			var canvas = document.createElement( 'canvas' );
+			var context = canvas.getContext( 'webgl2' );
+			renderer = new THREE.WebGLRenderer( { canvas: canvas, context: context } );
+
+			// Attach renderer to DOM
+			container = document.createElement( 'div' );
+			document.body.appendChild( container );
+			container.appendChild( renderer.domElement );
+
+			// Create camera (The volume renderer does not work very well with perspective yet)
+			camera = new THREE.OrthographicCamera()
+			camera.position.z = 1000;
+			camera.near = 0.1;
+			camera.far = 5000;
+			camera.up = new THREE.Vector3( 0, 0, 1 );  // In our data, z is the up/down axis !!
+			scene.add( camera );
+
+			// Create controls
+			controls = new THREE.OrbitControls( camera, renderer.domElement );
+			controls.addEventListener( 'change', animate );
+			controls.target.set( 64, 64, 128 );
+			controls.enableDamping  = false;
+			controls.update();
+
+			// Lighting is baked into the shader at the moment.
+			// var dirLight = new THREE.DirectionalLight( 0xffffff );
+
+			// The gui for interaction
+			volconfig = { clim1: 0, clim2: 1, renderstyle: 'iso', isothreshold: 0.15, colormap: 'viridis' };
+			var gui = new dat.GUI();
+			gui.add( volconfig, 'clim1', 0, 1, 0.01 ).onChange( updateUniforms );
+			gui.add( volconfig, 'clim2', 0, 1, 0.01 ).onChange( updateUniforms );
+			gui.add( volconfig, 'colormap', { gray: 'gray', viridis: 'viridis' } ).onChange( updateUniforms );
+			gui.add( volconfig, 'renderstyle', { mip: 'mip', iso: 'iso' } ).onChange( updateUniforms );
+			gui.add( volconfig, 'isothreshold', 0, 1, 0.01 ).onChange( updateUniforms );
+
+			// Keep track of window size (and initialize now)
+			window.addEventListener( 'resize', onWindowResize, false );
+			onWindowResize();
+
+			// Load the data ...
+			var loader = new THREE.NRRDLoader();
+			loader.load( "models/nrrd/stent.nrrd", function ( volume ) {
+				window.volume = volume;
+				// Texture to hold the volume. We have scalars, so we put our data in the red channel.
+				// THREEJS will select R32F (33326) based on the RedFormat and FloatType.
+				// Also see https://www.khronos.org/registry/webgl/specs/latest/2.0/#TEXTURE_TYPES_FORMATS_FROM_DOM_ELEMENTS_TABLE
+				// TODO: look the dtype up in the volume metadata
+				var texture = new THREE.Texture3D( volume.data, volume.xLength, volume.yLength, volume.zLength );
+				texture.format = THREE.RedFormat;
+				texture.type = THREE.FloatType;
+				texture.minFilter = texture.magFilter = THREE.LinearFilter;
+				texture.unpackAlignment = 1;
+				texture.needsUpdate = true;
+
+				// Colormap textures
+				cmtextures = {
+					viridis: new THREE.TextureLoader().load( 'textures/cm_viridis.png' ),
+					gray: new THREE.TextureLoader().load( 'textures/cm_gray.png' )
+				};
+
+				// Material (shaders) to render the volume using raycasting
+				volmaterial = new THREE.ShaderMaterial( THREE.VolumeRenderShader1 );
+				volmaterial.side = THREE.BackSide;  // The volume shader uses the backface as its "reference point"
+
+				// Apply standard volume material uniform info
+				volmaterial.uniforms.u_data.value = texture;
+				volmaterial.uniforms.u_size.value = [ volume.xLength, volume.yLength, volume.zLength ];
+
+				// Geometry for the volume
+				var volgeometry = new THREE.BoxGeometry( volume.xLength, volume.yLength, volume.zLength );
+				volgeometry = volgeometry.translate( volume.xLength / 2 - 0.5, volume.yLength / 2 - 0.5, volume.zLength / 2 - 0.5 );
+				var volcube = new THREE.Mesh( volgeometry, volmaterial );
+				scene.add( volcube );
+
+				// Apply uniforms now, this will call animate(), but apparently that's not enough sometimes
+				this.updateUniforms();
+				window.setTimeout(animate, 100);
+				window.setTimeout(animate, 1000);
+
+			} );
+
+		}
+
+		function updateUniforms() {
+
+			volmaterial.uniforms.u_clim.value = [ volconfig.clim1, volconfig.clim2 ];
+			volmaterial.uniforms.u_renderstyle.value = volconfig.renderstyle == 'mip' ? 0 : 1; // 0: MIP, 1: ISO
+			volmaterial.uniforms.u_renderthreshold.value = volconfig.isothreshold;  // For ISO renderstyle
+			volmaterial.uniforms.u_cmdata.value = cmtextures[volconfig.colormap];
+
+			volmaterial.needsUpdate = true;
+			animate();
+
+		}
+
+		function onWindowResize() {
+
+			camera.right = window.innerWidth / 5.0;
+			camera.left = -camera.right;
+			camera.top = window.innerHeight / 5.0;
+			camera.bottom = -camera.top;
+			camera.updateProjectionMatrix();
+
+			renderer.setPixelRatio( window.devicePixelRatio );
+			renderer.setSize( window.innerWidth, window.innerHeight );
+
+			if (controls.handleResize) { controls.handleResize(); }
+
+			animate();
+		}
+
+		function animate() {
+
+			// throttled animate function
+			if ( !drawrequested ) {
+				drawrequested = true;
+				window.requestAnimationFrame( _animate );
+			}
+
+		}
+
+		function _animate() {
+
+			drawrequested = false;
+			renderer.render( scene, camera );
+
+		}
+
+	</script>
+
+</body>
+</html>