xushiting hace 2 años
padre
commit
3f176a404b

+ 1 - 1
gulpfile.js

@@ -88,7 +88,7 @@ let shaders = [
 // from the command line to start the server (default port is 8080)
 gulp.task('webserver', gulp.series(async function() {
 	server = connect.server({
-		port: 1234,
+		port: 5000,
         host:'0.0.0.0',
 		https: false,
 	});

La diferencia del archivo ha sido suprimida porque es demasiado grande
+ 1 - 0
libs/three.js/libs/ktx-parse.module.js


+ 102 - 0
libs/utils/WorkerPool.js

@@ -0,0 +1,102 @@
+/**
+ * @author Deepkolos / https://github.com/deepkolos
+ */
+
/**
 * @author Deepkolos / https://github.com/deepkolos
 *
 * A fixed-size pool of Web Workers that serializes jobs through
 * postMessage() and resolves each returned promise with the worker's reply
 * event. Workers are created lazily via the factory supplied with
 * setWorkerCreator(); excess jobs wait in a FIFO queue until a slot frees up.
 */
export class WorkerPool {

	/**
	 * @param {number} pool - maximum number of concurrent workers
	 *   (occupancy is tracked in an integer bitmask, so keep this <= 31).
	 */
	constructor( pool = 4 ) {

		this.pool = pool;
		this.queue = [];           // pending { resolve, msg, transfer } jobs
		this.workers = [];         // lazily created workers, indexed by slot
		this.workersResolve = [];  // resolver of the in-flight job per slot
		this.workerStatus = 0;     // bitmask: bit i set => worker i is busy

	}

	// Lazily create the worker for a slot and hook up its reply handler.
	_initWorker( workerId ) {

		if ( ! this.workers[ workerId ] ) {

			const worker = this.workerCreator();
			worker.addEventListener( 'message', this._onMessage.bind( this, workerId ) );
			this.workers[ workerId ] = worker;

		}

	}

	// Returns the index of the first idle slot, or -1 when all are busy.
	_getIdleWorker() {

		for ( let i = 0; i < this.pool; i ++ )
			if ( ! ( this.workerStatus & ( 1 << i ) ) ) return i;

		return - 1;

	}

	// Reply handler: settle the in-flight promise, then either feed the
	// worker the next queued job or mark its slot idle.
	_onMessage( workerId, msg ) {

		const resolve = this.workersResolve[ workerId ];
		resolve && resolve( msg );

		if ( this.queue.length ) {

			const { resolve, msg, transfer } = this.queue.shift();
			this.workersResolve[ workerId ] = resolve;
			this.workers[ workerId ].postMessage( msg, transfer );

		} else {

			// Fix: drop the settled resolver so a spurious extra message from
			// the worker can neither re-invoke it nor keep it alive.
			this.workersResolve[ workerId ] = null;

			// Fix: clear the busy bit idempotently. The original XOR toggle
			// would wrongly re-mark an already-idle worker as busy if the
			// worker ever posted an unexpected additional message.
			this.workerStatus &= ~ ( 1 << workerId );

		}

	}

	// Supply the factory used to construct workers on demand.
	setWorkerCreator( workerCreator ) {

		this.workerCreator = workerCreator;

	}

	// Adjust the maximum number of concurrent workers.
	setWorkerLimit( pool ) {

		this.pool = pool;

	}

	/**
	 * Dispatch a job to an idle worker, or queue it until one is free.
	 *
	 * @param {*} msg - payload forwarded to Worker#postMessage.
	 * @param {Transferable[]} [transfer] - optional transferable objects.
	 * @returns {Promise<MessageEvent>} resolves with the worker's reply event.
	 */
	postMessage( msg, transfer ) {

		return new Promise( ( resolve ) => {

			const workerId = this._getIdleWorker();

			if ( workerId !== - 1 ) {

				this._initWorker( workerId );
				this.workerStatus |= 1 << workerId;
				this.workersResolve[ workerId ] = resolve;
				this.workers[ workerId ].postMessage( msg, transfer );

			} else {

				this.queue.push( { resolve, msg, transfer } );

			}

		} );

	}

	// Terminate all workers and reset every piece of internal state.
	// Note: queued (never-dispatched) jobs leave their promises pending.
	dispose() {

		this.workers.forEach( ( worker ) => worker.terminate() );
		this.workersResolve.length = 0;
		this.workers.length = 0;
		this.queue.length = 0;
		this.workerStatus = 0;

	}

}

+ 54 - 0
src/materials/postprocessing/CopyShader.js

@@ -0,0 +1,54 @@
+/**
+ * @author alteredq / http://alteredqualia.com/
+ *
+ * Full-screen textured quad shader
+ */
+import * as THREE from "../../../libs/three.js/build/three.module.js";
+
+
+
+
/**
 * Full-screen textured quad shader: copies `tDiffuse` to the output,
 * scaled by `opacity`.
 */
const CopyShader = {

	uniforms: {

		// Source texture to copy.
		"tDiffuse": { value: null },
		// Global fade factor applied to the copied texel.
		"opacity":  { value: 1.0 }

	},

	vertexShader: [

		"varying vec2 vUv;",

		"void main() {",

			"vUv = uv;",
			"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",

		"}"

	].join( "\n" ),

	fragmentShader: [

		"uniform float opacity;",

		"uniform sampler2D tDiffuse;",

		"varying vec2 vUv;",

		"void main() {",

			// Use this pair when premultipliedAlpha is enabled; otherwise
			// use the commented-out variant below.
			"vec4 texel = texture2D( tDiffuse, vUv );",
			"gl_FragColor = opacity * texel;",

			//"gl_FragColor = texture2D( tDiffuse, vUv );",
			//"gl_FragColor.a *= opacity;",

		"}"

	].join( "\n" )

};

export default CopyShader;

+ 189 - 0
src/materials/postprocessing/EffectComposer.js

@@ -0,0 +1,189 @@
+/**
+ * @author alteredq / http://alteredqualia.com/
+ */
+import * as THREE from "../../../libs/three.js/build/three.module.js";
+import CopyShader from './CopyShader'
+import {ShaderPass} from './ShaderPass' 
+import { MaskPass, ClearMaskPass } from './MaskPass.js'; 
+
+
/**
 * Post-processing chain: runs a sequence of passes, ping-ponging between
 * two internal render targets (read/write buffers).
 *
 * @param {THREE.WebGLRenderer} renderer - renderer the passes draw with.
 * @param {THREE.WebGLRenderTarget} [renderTarget] - optional first buffer;
 *   when omitted, an RGBA target sized to the drawing buffer is created.
 *   The second buffer is always a clone of the first.
 */
var EffectComposer = function ( renderer, renderTarget ) {

	this.renderer = renderer;

	if ( renderTarget === undefined ) {

		var parameters = {
			minFilter: THREE.LinearFilter,
			magFilter: THREE.LinearFilter,
			format: THREE.RGBAFormat,
			stencilBuffer: false, 
		};

		var size = renderer.getDrawingBufferSize(new THREE.Vector2);
		renderTarget = new THREE.WebGLRenderTarget( size.width, size.height ,  parameters );
		renderTarget.texture.name = 'EffectComposer.rt1';

	}

	this.renderTarget1 = renderTarget;
	this.renderTarget2 = renderTarget.clone();
	this.renderTarget2.texture.name = 'EffectComposer.rt2';

	// Passes read from readBuffer and draw into writeBuffer; the two are
	// swapped after each pass that sets `needsSwap`.
	this.writeBuffer = this.renderTarget1;
	this.readBuffer = this.renderTarget2;

	this.passes = [];

	// dependencies

	/* if ( THREE.CopyShader === undefined ) {

		console.error( 'THREE.EffectComposer relies on THREE.CopyShader' );

	}

	if ( THREE.ShaderPass === undefined ) {

		console.error( 'THREE.EffectComposer relies on THREE.ShaderPass' );

	} */

	// Used to blit the final image to the screen / seed the read buffer.
	this.copyPass = new ShaderPass( CopyShader );
    
};
+
Object.assign(  EffectComposer.prototype, {

	// Exchange read and write buffers (used after passes that need a swap).
	swapBuffers: function () {

		var tmp = this.readBuffer;
		this.readBuffer = this.writeBuffer;
		this.writeBuffer = tmp;

	},

	// Append a pass to the chain, sized to the current drawing buffer.
	addPass: function ( pass ) {

		this.passes.push( pass );

		var size = this.renderer.getDrawingBufferSize( new THREE.Vector2 );
		pass.setSize( size.width, size.height );

	},

	// Remove a previously added pass (no-op if it is not in the chain).
	removePass: function ( pass ) {

		let index = this.passes.indexOf( pass );
		if ( index > - 1 ) this.passes.splice( index, 1 );

	},

	// Insert a pass at a specific position in the chain.
	insertPass: function ( pass, index ) {

		this.passes.splice( index, 0, pass );

	},

	/**
	 * Run all enabled passes in order. `scene`, `camera`, `viewports` and
	 * `renderFun` are forwarded to each pass's render(); `renderFun`, when
	 * supplied, performs the actual scene draw on the pass's behalf.
	 */
	render: function ( scene, camera, viewports, renderFun  ) {

		var maskActive = false;
		let passes = this.passes.filter( e => e.enabled );
		var pass, i, il = passes.length;

		if ( this.readTarget ) { // added: seed readBuffer with the pixels of the current render target

			this.copyPass.render( scene, this.renderer, this.readBuffer, this.renderer.getRenderTarget() );

		}

		for ( i = 0; i < il; i ++ ) {

			pass = passes[ i ];

			pass.render( scene, camera, viewports, this.renderer, this.writeBuffer, this.readBuffer, maskActive, renderFun );

			if ( pass.needsSwap ) {

				if ( maskActive ) {

					var context = this.renderer.context;

					// Copy outside the mask, then restore the mask test.
					context.stencilFunc( context.NOTEQUAL, 1, 0xffffffff );

					// NOTE(review): this call's argument order differs from the
					// other copyPass.render call sites in this file — confirm
					// against ShaderPass.render before relying on masked passes.
					this.copyPass.render( this.renderer, this.writeBuffer, this.readBuffer );

					context.stencilFunc( context.EQUAL, 1, 0xffffffff );

				}

				this.swapBuffers();

			}

			if ( MaskPass !== undefined ) {

				if ( pass instanceof MaskPass ) {

					maskActive = true;

				} else if ( pass instanceof ClearMaskPass ) {

					maskActive = false;

				}

			}

		}

		// added: if the last pass did not draw to the screen (or a target),
		// blit the accumulated result to the screen.
		// Fix: guard `pass` — with zero enabled passes it is undefined and the
		// original `!pass.renderToScreen` dereference threw a TypeError.
		if ( pass && ! pass.renderToScreen ) {

			this.copyPass.renderToScreen = true;

			this.copyPass.render( null, null, this.renderer, this.writeBuffer, this.readBuffer );

		}

	},

	// Replace both internal buffers (disposing the old ones); when no target
	// is given, clone buffer 1 at the current drawing-buffer size.
	reset: function ( renderTarget ) {

		if ( renderTarget === undefined ) {

			var size = this.renderer.getDrawingBufferSize( new THREE.Vector2 );

			renderTarget = this.renderTarget1.clone();
			renderTarget.setSize( size.width, size.height );

		}

		this.renderTarget1.dispose();
		this.renderTarget2.dispose();
		this.renderTarget1 = renderTarget;
		this.renderTarget2 = renderTarget.clone();

		this.writeBuffer = this.renderTarget1;
		this.readBuffer = this.renderTarget2;

	},

	// Resize both buffers and every pass; `scaleRatio` (default 1) allows
	// super-/sub-sampling relative to the given size.
	setSize: function ( width, height, scaleRatio ) {

		scaleRatio = scaleRatio || 1;

		this.renderTarget1.setSize( width * scaleRatio, height * scaleRatio );
		this.renderTarget2.setSize( width * scaleRatio, height * scaleRatio );

		for ( var i = 0; i < this.passes.length; i ++ ) {

			this.passes[ i ].setSize( width * scaleRatio, height * scaleRatio );

		}

	}

} );
+
+
+export default EffectComposer 

+ 284 - 0
src/materials/postprocessing/FXAAShader.js

@@ -0,0 +1,284 @@
+import * as THREE from "../../../libs/three.js/build/three.module.js";
+
+/**
+ * NVIDIA FXAA by Timothy Lottes
+ * https://developer.download.nvidia.com/assets/gamedev/files/sdk/11/FXAA_WhitePaper.pdf
+ * - WebGL port by @supereggbert
+ * http://www.glge.org/demos/fxaa/
+ * Further improved by Daniel Sturk
+ */
+
// FXAA post-processing shader. The GLSL below is a single template literal
// and is deliberately left untouched; comments are added only in the
// surrounding JavaScript.
const FXAAShader = {

	uniforms: {

		// Input color buffer to antialias.
		'tDiffuse': { value: null },
		// 1 / render-target size in pixels; must be updated on resize.
		'resolution': { value: new THREE.Vector2( 1 / 1024, 1 / 512 ) }

	},

	// Pass-through quad vertex shader.
	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	// FXAA 3.11 fragment shader (NVIDIA), simplified/improved by Daniel Sturk.
	// NOTE(review): the contrast() helper assumes premultiplied-alpha input
	// (see its comment below) — confirm the composer renders that way.
	fragmentShader: `
	precision highp float;

	uniform sampler2D tDiffuse;

	uniform vec2 resolution;

	varying vec2 vUv;

	// FXAA 3.11 implementation by NVIDIA, ported to WebGL by Agost Biro (biro@archilogic.com)

	//----------------------------------------------------------------------------------
	// File:        es3-kepler\FXAA\assets\shaders/FXAA_DefaultES.frag
	// SDK Version: v3.00
	// Email:       gameworks@nvidia.com
	// Site:        http://developer.nvidia.com/
	//
	// Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
	//
	// Redistribution and use in source and binary forms, with or without
	// modification, are permitted provided that the following conditions
	// are met:
	//  * Redistributions of source code must retain the above copyright
	//    notice, this list of conditions and the following disclaimer.
	//  * Redistributions in binary form must reproduce the above copyright
	//    notice, this list of conditions and the following disclaimer in the
	//    documentation and/or other materials provided with the distribution.
	//  * Neither the name of NVIDIA CORPORATION nor the names of its
	//    contributors may be used to endorse or promote products derived
	//    from this software without specific prior written permission.
	//
	// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ''AS IS'' AND ANY
	// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
	// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
	// PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
	// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
	// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
	// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
	// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
	// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
	// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
	// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
	//
	//----------------------------------------------------------------------------------

	#ifndef FXAA_DISCARD
			//
			// Only valid for PC OpenGL currently.
			// Probably will not work when FXAA_GREEN_AS_LUMA = 1.
			//
			// 1 = Use discard on pixels which don't need AA.
			//     For APIs which enable concurrent TEX+ROP from same surface.
			// 0 = Return unchanged color on pixels which don't need AA.
			//
			#define FXAA_DISCARD 0
	#endif

	/*--------------------------------------------------------------------------*/
	#define FxaaTexTop(t, p) texture2D(t, p, -100.0)
	#define FxaaTexOff(t, p, o, r) texture2D(t, p + (o * r), -100.0)
	/*--------------------------------------------------------------------------*/

	#define NUM_SAMPLES 5

	// assumes colors have premultipliedAlpha, so that the calculated color contrast is scaled by alpha
	float contrast( vec4 a, vec4 b ) {
			vec4 diff = abs( a - b );
			return max( max( max( diff.r, diff.g ), diff.b ), diff.a );
	}

	/*============================================================================

									FXAA3 QUALITY - PC

	============================================================================*/

	/*--------------------------------------------------------------------------*/
	vec4 FxaaPixelShader(
			vec2 posM,
			sampler2D tex,
			vec2 fxaaQualityRcpFrame,
			float fxaaQualityEdgeThreshold,
			float fxaaQualityinvEdgeThreshold
	) {
			vec4 rgbaM = FxaaTexTop(tex, posM);
			vec4 rgbaS = FxaaTexOff(tex, posM, vec2( 0.0, 1.0), fxaaQualityRcpFrame.xy);
			vec4 rgbaE = FxaaTexOff(tex, posM, vec2( 1.0, 0.0), fxaaQualityRcpFrame.xy);
			vec4 rgbaN = FxaaTexOff(tex, posM, vec2( 0.0,-1.0), fxaaQualityRcpFrame.xy);
			vec4 rgbaW = FxaaTexOff(tex, posM, vec2(-1.0, 0.0), fxaaQualityRcpFrame.xy);
			// . S .
			// W M E
			// . N .

			bool earlyExit = max( max( max(
					contrast( rgbaM, rgbaN ),
					contrast( rgbaM, rgbaS ) ),
					contrast( rgbaM, rgbaE ) ),
					contrast( rgbaM, rgbaW ) )
					< fxaaQualityEdgeThreshold;
			// . 0 .
			// 0 0 0
			// . 0 .

			#if (FXAA_DISCARD == 1)
					if(earlyExit) FxaaDiscard;
			#else
					if(earlyExit) return rgbaM;
			#endif

			float contrastN = contrast( rgbaM, rgbaN );
			float contrastS = contrast( rgbaM, rgbaS );
			float contrastE = contrast( rgbaM, rgbaE );
			float contrastW = contrast( rgbaM, rgbaW );

			float relativeVContrast = ( contrastN + contrastS ) - ( contrastE + contrastW );
			relativeVContrast *= fxaaQualityinvEdgeThreshold;

			bool horzSpan = relativeVContrast > 0.;
			// . 1 .
			// 0 0 0
			// . 1 .

			// 45 deg edge detection and corners of objects, aka V/H contrast is too similar
			if( abs( relativeVContrast ) < .3 ) {
					// locate the edge
					vec2 dirToEdge;
					dirToEdge.x = contrastE > contrastW ? 1. : -1.;
					dirToEdge.y = contrastS > contrastN ? 1. : -1.;
					// . 2 .      . 1 .
					// 1 0 2  ~=  0 0 1
					// . 1 .      . 0 .

					// tap 2 pixels and see which ones are "outside" the edge, to
					// determine if the edge is vertical or horizontal

					vec4 rgbaAlongH = FxaaTexOff(tex, posM, vec2( dirToEdge.x, -dirToEdge.y ), fxaaQualityRcpFrame.xy);
					float matchAlongH = contrast( rgbaM, rgbaAlongH );
					// . 1 .
					// 0 0 1
					// . 0 H

					vec4 rgbaAlongV = FxaaTexOff(tex, posM, vec2( -dirToEdge.x, dirToEdge.y ), fxaaQualityRcpFrame.xy);
					float matchAlongV = contrast( rgbaM, rgbaAlongV );
					// V 1 .
					// 0 0 1
					// . 0 .

					relativeVContrast = matchAlongV - matchAlongH;
					relativeVContrast *= fxaaQualityinvEdgeThreshold;

					if( abs( relativeVContrast ) < .3 ) { // 45 deg edge
							// 1 1 .
							// 0 0 1
							// . 0 1

							// do a simple blur
							return mix(
									rgbaM,
									(rgbaN + rgbaS + rgbaE + rgbaW) * .25,
									.4
							);
					}

					horzSpan = relativeVContrast > 0.;
			}

			if(!horzSpan) rgbaN = rgbaW;
			if(!horzSpan) rgbaS = rgbaE;
			// . 0 .      1
			// 1 0 1  ->  0
			// . 0 .      1

			bool pairN = contrast( rgbaM, rgbaN ) > contrast( rgbaM, rgbaS );
			if(!pairN) rgbaN = rgbaS;

			vec2 offNP;
			offNP.x = (!horzSpan) ? 0.0 : fxaaQualityRcpFrame.x;
			offNP.y = ( horzSpan) ? 0.0 : fxaaQualityRcpFrame.y;

			bool doneN = false;
			bool doneP = false;

			float nDist = 0.;
			float pDist = 0.;

			vec2 posN = posM;
			vec2 posP = posM;

			int iterationsUsed = 0;
			int iterationsUsedN = 0;
			int iterationsUsedP = 0;
			for( int i = 0; i < NUM_SAMPLES; i++ ) {
					iterationsUsed = i;

					float increment = float(i + 1);

					if(!doneN) {
							nDist += increment;
							posN = posM + offNP * nDist;
							vec4 rgbaEndN = FxaaTexTop(tex, posN.xy);
							doneN = contrast( rgbaEndN, rgbaM ) > contrast( rgbaEndN, rgbaN );
							iterationsUsedN = i;
					}

					if(!doneP) {
							pDist += increment;
							posP = posM - offNP * pDist;
							vec4 rgbaEndP = FxaaTexTop(tex, posP.xy);
							doneP = contrast( rgbaEndP, rgbaM ) > contrast( rgbaEndP, rgbaN );
							iterationsUsedP = i;
					}

					if(doneN || doneP) break;
			}


			if ( !doneP && !doneN ) return rgbaM; // failed to find end of edge

			float dist = min(
					doneN ? float( iterationsUsedN ) / float( NUM_SAMPLES - 1 ) : 1.,
					doneP ? float( iterationsUsedP ) / float( NUM_SAMPLES - 1 ) : 1.
			);

			// hacky way of reduces blurriness of mostly diagonal edges
			// but reduces AA quality
			dist = pow(dist, .5);

			dist = 1. - dist;

			return mix(
					rgbaM,
					rgbaN,
					dist * .5
			);
	}

	void main() {
			const float edgeDetectionQuality = .05 ;  // higher keeps more detail; lower is smoother but blurrier
			const float invEdgeDetectionQuality = 1. / edgeDetectionQuality;

			gl_FragColor = FxaaPixelShader(
					vUv,
					tDiffuse,
					resolution,
					edgeDetectionQuality, // [0,1] contrast needed, otherwise early discard
					invEdgeDetectionQuality
			);

	}
	`

};

export default FXAAShader  

+ 64 - 0
src/materials/postprocessing/LuminosityHighPassShader.js

@@ -0,0 +1,64 @@
+/**
+ * @author bhouston / http://clara.io/
+ *
+ * Luminosity
+ * http://en.wikipedia.org/wiki/Luminosity
+ */
+
// High-pass luminosity filter: keeps texels whose Rec.601 luma exceeds
// `luminosityThreshold` (with a `smoothWidth`-wide smoothstep ramp) and
// fades everything below it toward `defaultColor`/`defaultOpacity`.
//
// NOTE(review): unlike the sibling shader modules in this folder, this file
// assigns to a `THREE` namespace instead of importing three.js and using
// `export default` — presumably `THREE` is a global here; verify against
// how this shader is consumed before converting it to a module export.
THREE.LuminosityHighPassShader = {

  shaderID: "luminosityHighPass",

	uniforms: {

		// Input color buffer.
		"tDiffuse": { type: "t", value: null },
		// Luma value above which texels are kept.
		"luminosityThreshold": { type: "f", value: 1.0 },
		// Width of the smoothstep transition band above the threshold.
		"smoothWidth": { type: "f", value: 1.0 },
		// Color/opacity used for texels below the threshold.
		"defaultColor": { type: "c", value: new THREE.Color( 0x000000 ) },
		"defaultOpacity":  { type: "f", value: 0.0 }

	},

	vertexShader: [

		"varying vec2 vUv;",

		"void main() {",

			"vUv = uv;",

			"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",

		"}"

	].join("\n"),

	fragmentShader: [

		"uniform sampler2D tDiffuse;",
		"uniform vec3 defaultColor;",
		"uniform float defaultOpacity;",
		"uniform float luminosityThreshold;",
		"uniform float smoothWidth;",

		"varying vec2 vUv;",

		"void main() {",

			"vec4 texel = texture2D( tDiffuse, vUv );",

			// Rec.601 luma weights.
			"vec3 luma = vec3( 0.299, 0.587, 0.114 );",

			"float v = dot( texel.xyz, luma );",

			"vec4 outputColor = vec4( defaultColor.rgb, defaultOpacity );",

			"float alpha = smoothstep( luminosityThreshold, luminosityThreshold + smoothWidth, v );",

			"gl_FragColor = mix( outputColor, texel, alpha );",

		"}"

	].join("\n")

};

+ 101 - 0
src/materials/postprocessing/MaskPass.js

@@ -0,0 +1,101 @@
+import { Pass } from './ShaderPass.js';
+
class MaskPass extends Pass {

	/**
	 * Stamps `scene` (as seen by `camera`) into the stencil buffer so that
	 * subsequent passes draw only where the scene covered pixels — or only
	 * where it did not, when `inverse` is set.
	 */
	constructor( scene, camera ) {

		super();

		this.scene = scene;
		this.camera = camera;

		// Clear the targets before stamping the stencil.
		this.clear = true;
		// A mask produces no colour output, so no buffer swap is needed.
		this.needsSwap = false;

		// When true, later passes draw where the scene did NOT cover pixels.
		this.inverse = false;

	}

	render( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) {

		const gl = renderer.getContext();
		const buffers = renderer.state.buffers;

		// Disable and lock colour/depth writes: only the stencil is touched.
		buffers.color.setMask( false );
		buffers.depth.setMask( false );
		buffers.color.setLocked( true );
		buffers.depth.setLocked( true );

		// Stencil value written where geometry lands, vs. the clear value.
		const writeValue = this.inverse ? 0 : 1;
		const clearValue = this.inverse ? 1 : 0;

		buffers.stencil.setTest( true );
		buffers.stencil.setOp( gl.REPLACE, gl.REPLACE, gl.REPLACE );
		buffers.stencil.setFunc( gl.ALWAYS, writeValue, 0xffffffff );
		buffers.stencil.setClear( clearValue );
		buffers.stencil.setLocked( true );

		// Stamp the mask into both targets so either can be used next.
		for ( const target of [ readBuffer, writeBuffer ] ) {

			renderer.setRenderTarget( target );
			if ( this.clear ) renderer.clear();
			renderer.render( this.scene, this.camera );

		}

		// Unlock colour and depth for the passes that follow.
		buffers.color.setLocked( false );
		buffers.depth.setLocked( false );

		// From now on, draw only where the stencil equals 1.
		buffers.stencil.setLocked( false );
		buffers.stencil.setFunc( gl.EQUAL, 1, 0xffffffff ); // draw if == 1
		buffers.stencil.setOp( gl.KEEP, gl.KEEP, gl.KEEP );
		buffers.stencil.setLocked( true );

	}

}
+
class ClearMaskPass extends Pass {

	// Ends a masked section of the pass chain by disabling stencil testing.
	constructor() {

		super();

		// Produces no output image, so no buffer swap is needed.
		this.needsSwap = false;

	}

	render( renderer /*, writeBuffer, readBuffer, deltaTime, maskActive */ ) {

		const { stencil } = renderer.state.buffers;

		stencil.setLocked( false );
		stencil.setTest( false );

	}

}
+
+export { MaskPass, ClearMaskPass };

+ 691 - 0
src/materials/postprocessing/OutlinePass.js

@@ -0,0 +1,691 @@
+/**
+ * @author spidersharma / http://eduperiment.com/
+ */
+import * as THREE from "../../../libs/three.js/build/three.module.js";
+import {Pass, ShaderPass} from './ShaderPass'
+import CopyShader from './CopyShader'
+
+
/**
 * Render pass that draws an outline around `selectedObjects`, using a
 * depth buffer of the non-selected scene plus an edge-detection pass.
 * Colors, strength and glow are tweakable via the public fields below.
 */
let OutlinePass = function ( selectedObjects ) {

	/* scene = scene;
	camera = camera; */
	this.selectedObjects = selectedObjects !== undefined ? selectedObjects : [];
	this.visibleEdgeColor = new THREE.Color( 1, 1, 1 );
	this.hiddenEdgeColor = new THREE.Color( 0.1, 0.04, 0.02 );
	this.edgeGlow = 0.0;
	this.usePatternTexture = false;
	//this.edgeThickness = 1.0;
	this.edgeStrength =  50;
	this.downSampleRatio = 1//2;  // anti-aliasing: the lower the value, the larger the renderTarget, the stronger the AA and the thinner the lines (maybe the blur step could then be removed?)
	this.pulsePeriod = 0;

	Pass.call( this );

	this.resolution =  new THREE.Vector2( 256, 256 ); 

	var pars = { minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBAFormat };

	var resx = Math.round( this.resolution.x / this.downSampleRatio );
	var resy = Math.round( this.resolution.y / this.downSampleRatio );

	//this.maskBufferMaterial = new THREE.MeshBasicMaterial( { color: 0xffffff } );
	//this.maskBufferMaterial.side = THREE.DoubleSide;
	// Target holding the selected-object mask.
	this.renderTargetMaskBuffer = new THREE.WebGLRenderTarget( this.resolution.x, this.resolution.y, pars );
	this.renderTargetMaskBuffer.texture.name = "OutlinePass.mask";
	this.renderTargetMaskBuffer.texture.generateMipmaps = false;

	// Depth-only material used to render the non-selected scene.
	this.depthMaterial = new THREE.MeshDepthMaterial();
	this.depthMaterial.side = THREE.DoubleSide;
	this.depthMaterial.depthPacking = THREE.RGBADepthPacking;
	this.depthMaterial.blending = THREE.NoBlending;

	this.prepareMaskMaterial = this.getPrepareMaskMaterial();
	this.prepareMaskMaterial.side = THREE.DoubleSide;
	//this.replaceDepthToViewZ(  viewer.mainViewport.camera  /*  camera */ );

	// Target holding the packed depth of the non-selected scene.
	this.renderTargetDepthBuffer = new THREE.WebGLRenderTarget( this.resolution.x, this.resolution.y, pars );
	this.renderTargetDepthBuffer.texture.name = "OutlinePass.depth";
	this.renderTargetDepthBuffer.texture.generateMipmaps = false;

	/* this.renderTargetMaskDownSampleBuffer = new THREE.WebGLRenderTarget( resx, resy, pars );
	this.renderTargetMaskDownSampleBuffer.texture.name = "OutlinePass.depthDownSample";
	this.renderTargetMaskDownSampleBuffer.texture.generateMipmaps = false;

	this.renderTargetBlurBuffer1 = new THREE.WebGLRenderTarget( resx, resy, pars );
	this.renderTargetBlurBuffer1.texture.name = "OutlinePass.blur1";
	this.renderTargetBlurBuffer1.texture.generateMipmaps = false;
	this.renderTargetBlurBuffer2 = new THREE.WebGLRenderTarget( Math.round( resx / 2 ), Math.round( resy / 2 ), pars );
	this.renderTargetBlurBuffer2.texture.name = "OutlinePass.blur2";
	this.renderTargetBlurBuffer2.texture.generateMipmaps = false; */

	this.edgeDetectionMaterial = this.getEdgeDetectionMaterial(this.edgeStrength);
     
    
	this.renderTargetEdgeBuffer1 = new THREE.WebGLRenderTarget( resx, resy, pars );
	this.renderTargetEdgeBuffer1.texture.name = "OutlinePass.edge1";
	this.renderTargetEdgeBuffer1.texture.generateMipmaps = false;
	/* this.renderTargetEdgeBuffer2 = new THREE.WebGLRenderTarget( Math.round( resx / 2 ), Math.round( resy / 2 ), pars );
	this.renderTargetEdgeBuffer2.texture.name = "OutlinePass.edge2";
	this.renderTargetEdgeBuffer2.texture.generateMipmaps = false; 

	var MAX_EDGE_THICKNESS = 4;
	var MAX_EDGE_GLOW = 4;

	this.separableBlurMaterial1 = this.getSeperableBlurMaterial( MAX_EDGE_THICKNESS );
	this.separableBlurMaterial1.uniforms[ "texSize" ].value = new THREE.Vector2( resx, resy );
	this.separableBlurMaterial1.uniforms[ "kernelRadius" ].value = 1;
	this.separableBlurMaterial2 = this.getSeperableBlurMaterial( MAX_EDGE_GLOW );
	this.separableBlurMaterial2.uniforms[ "texSize" ].value = new THREE.Vector2( Math.round( resx / 2 ), Math.round( resy / 2 ) );
	this.separableBlurMaterial2.uniforms[ "kernelRadius" ].value = MAX_EDGE_GLOW;
    */
	// Overlay material
	this.overlayMaterial = this.getOverlayMaterial();

	// copy material 
	this.copyUniforms = THREE.UniformsUtils.clone( CopyShader.uniforms );
	this.copyUniforms[ "opacity" ].value = 1.0;

	this.materialCopy = new THREE.ShaderMaterial( {
		uniforms: this.copyUniforms,
		vertexShader: CopyShader.vertexShader,
		fragmentShader: CopyShader.fragmentShader,
		blending: THREE.NoBlending,
		depthTest: false,
		depthWrite: false,
		transparent: true
	} );

    




	// This pass composites in place, so the composer must not swap buffers.
	this.enabled = true;
	this.needsSwap = false;

	this.oldClearColor = new THREE.Color();
	this.oldClearAlpha = 1;

	// Full-screen quad scene used for the screen-space sub-passes.
	this.camera = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 0, 1 );
	this.scene = new THREE.Scene();

	this.quad = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2, 2 ), null );
	this.quad.frustumCulled = false; // Avoid getting clipped
	this.scene.add( this.quad );

	/* this.tempPulseColor1 = new THREE.Color();
	this.tempPulseColor2 = new THREE.Color(); */
	// World -> depth-texture UV matrix, rebuilt per camera each frame.
	this.textureMatrix = new THREE.Matrix4();

	

};
+
+OutlinePass.prototype = Object.assign( Object.create(  Pass.prototype ), {
+
+	constructor: OutlinePass,
+
+	dispose: function () {
+
+		this.renderTargetMaskBuffer.dispose();
+        this.renderTargetEdgeBuffer1.dispose();
+        this.renderTargetDepthBuffer.dispose(); 
+	},
+    
+    replaceDepthToViewZ( camera ) { 
+		var type = camera.isPerspectiveCamera ? 'perspective' : 'orthographic';
+        if(type == this.lastCameraType )return
+        this.lastCameraType = type
+        this.prepareMaskMaterial.fragmentShader = this.prepareMaskMaterial.fragmentShader.replace( /DEPTH_TO_VIEW_Z/g, type + 'DepthToViewZ' );
+		this.prepareMaskMaterial.needsUpdate = true
+
+	},
+	setSize: function ( width, height ) {
+         
+        this.renderTargetEdgeBuffer1.setSize( width, height );
+		this.renderTargetMaskBuffer.setSize( width, height ); 
+        this.resolution.set(width,height) 
+
+	},
+
+	changeVisibilityOfSelectedObjects: function ( bVisible ) {
+
+		function gatherSelectedMeshesCallBack( object ) {
+
+			/* if ( object.isMesh ) { */
+            if ( object.isPointcloud || object.isMesh || object.isLine || object.isSprite ) {
+				/* if ( bVisible ) {
+
+					object.visible = object.userData.oldVisible;
+					delete object.userData.oldVisible;
+
+				} else {
+
+					object.userData.oldVisible = object.visible;
+					object.visible = bVisible;
+
+				} */
+                viewer.updateVisible(object, 'overlinePass', bVisible)
+			}
+
+		}
+
+		for ( var i = 0; i < this.selectedObjects.length; i ++ ) {
+
+			var selectedObject = this.selectedObjects[ i ];
+			selectedObject.traverse( gatherSelectedMeshesCallBack );
+
+		}
+
+	},
+
	// Hide (or restore) every renderable object that is NOT part of the
	// selection, across all given scenes. Saves each object's prior
	// visibility in `object.bVisible` so it can be restored later.
	changeVisibilityOfNonSelectedObjects: function ( bVisible , scenes) {

		var selectedMeshes = [];

		// Collect every renderable node under the selected roots.
		function gatherSelectedMeshesCallBack( object ) {

			//if ( object.isMesh ) selectedMeshes.push( object );
            if ( object.isPointcloud || object.isMesh || object.isLine || object.isSprite ) {
                selectedMeshes.push( object );
            }
            

		}

		for ( var i = 0; i < this.selectedObjects.length; i ++ ) {

			var selectedObject = this.selectedObjects[ i ];
			selectedObject.traverse( gatherSelectedMeshesCallBack );

		}

		// Toggle every renderable object that is not in `selectedMeshes`.
		function VisibilityChangeCallBack( object ) {

			if ( object.isPointcloud || object.isMesh || object.isLine || object.isSprite ) {

				var bFound = false;

				for ( var i = 0; i < selectedMeshes.length; i ++ ) {

					var selectedObjectId = selectedMeshes[ i ].id;

					if ( selectedObjectId === object.id ) {

						bFound = true;
						break;

					}

				}

				if ( ! bFound ) { 
					 
                    var visibility = object.visible;
                    viewer.updateVisible(object, 'overlinePass', bVisible) //add 
                    
                    // but the flag may be flipped back to true during rendering after
                    // being set to false, so other code changing visibility while we
                    // are hidden must go through updateVisible
                     
        
                    if(!bVisible){
                        object.visible = false
                    }else{
                        object.visible = !!object.bVisible
                    }


                    object.bVisible = visibility; 
                    
                    
                    // Neither the updateVisible call nor the direct `visible` assignment
                    // can be removed: the first guards against visibility that was set
                    // without updateVisible; the second guards against updateVisible
                    // re-enabling `visible` during rendering.
                    
                    return {stopContinue:true} //for pointcloud
                     
				}
                
			}
            
		}
        
        scenes.forEach(scene=>scene.traverse( VisibilityChangeCallBack ))
		

	}, 
+     
+	updateTextureMatrix: function (camera) {
+
+		this.textureMatrix.set( 0.5, 0.0, 0.0, 0.5,
+			0.0, 0.5, 0.0, 0.5,
+			0.0, 0.0, 0.5, 0.5,
+			0.0, 0.0, 0.0, 1.0 );
+		this.textureMatrix.multiply( camera.projectionMatrix );
+		this.textureMatrix.multiply( camera.matrixWorldInverse );
+
+	},
+
+	render: function (scenes, camera, viewports,  renderer, writeBuffer, readBuffer,  maskActive, renderFun ) {
+        if(!(scenes instanceof Array))scenes = [scenes]
+        
+        
+        
+		if ( this.selectedObjects.length > 0 && this.edgeStrength > 0) {
+            
+            let render2 = (target, dontRenderRtEDL=true)=>{
+                if(renderFun){
+                    renderFun({target  , dontRenderRtEDL})
+                }else{
+                    renderer.setRenderTarget(target)
+                    renderer.clear()
+                    scenes.forEach(scene=>renderer.render( scene, camera)) 
+                }
+            }
+            viewports.forEach(e=>{ 
+                e.oldBeforeRender = e.beforeRender
+                e.beforeRender = ()=>{ 
+                    e.oldBeforeRender && e.oldBeforeRender()
+                    this.replaceDepthToViewZ( e.camera );
+                }
+            })
+            
+	   
+			this.oldClearColor.copy( renderer.getClearColor(new THREE.Color) );
+			this.oldClearAlpha = renderer.getClearAlpha();
+			let oldAutoClear = renderer.autoClear;
+            let oldTarget = renderer.getRenderTarget();
+			renderer.autoClear = false;
+
+			if ( maskActive ) renderer.context.disable( renderer.context.STENCIL_TEST );
+
+			renderer.setClearColor( 0xffffff, 1 );
+
+			// Make selected objects invisible
+			this.changeVisibilityOfSelectedObjects( false );
+            
+            scenes.forEach(scene=>{
+                scene.currentBackground = scene.background;
+                scene.background = null;
+                // 1. Draw Non Selected objects in the depth buffer
+                scene.overrideMaterial = this.depthMaterial;    
+            }) 
+            
+			
+			
+            render2(this.renderTargetDepthBuffer)
+            //renderer.setRenderTarget(this.renderTargetDepthBuffer)
+            //renderer.clear()
+			//renderer.render( scene, camera/* , this.renderTargetDepthBuffer, true  */);
+            
+			// Make selected objects visible
+			this.changeVisibilityOfSelectedObjects( true );
+
+    
+            viewports.forEach(e=>{
+                e.beforeRender = ()=>{ 
+                    e.oldBeforeRender && e.oldBeforeRender()
+                    // Update Texture Matrix for Depth compare
+                    this.updateTextureMatrix(e.camera);
+                    this.prepareMaskMaterial.uniforms[ "cameraNearFar" ].value = new THREE.Vector2( e.camera.near, e.camera.far );
+                }
+            })
+
+
+			
+
+			// Make non selected objects invisible, and draw only the selected objects, by comparing the depth buffer of non selected objects
+			this.changeVisibilityOfNonSelectedObjects( false , scenes);
+			
+            
+            scenes.forEach(scene=>{ 
+                scene.overrideMaterial = this.prepareMaskMaterial; 
+            })
+            
+            
+			this.prepareMaskMaterial.uniforms[ "depthTexture" ].value = this.renderTargetDepthBuffer.texture;
+			this.prepareMaskMaterial.uniforms[ "textureMatrix" ].value = this.textureMatrix;
+			 
+            //renderer.setRenderTarget(this.renderTargetMaskBuffer)
+            //renderer.clear()
+            //renderer.render( scene, camera/* , this.renderTargetMaskBuffer, true */ );
+            
+            viewer.scene.pointclouds.forEach(e=>{  //先将点云透明度变为1,因为点云透明度莫名其妙会影响其r值
+                e.material._oldOpa = e.material.opacity
+                e.material.opacity = 1
+            })
+            
+            render2(this.renderTargetMaskBuffer)
+            
+            viewer.scene.pointclouds.forEach(e=>{//恢复
+                e.material.opacity = e.material._oldOpa
+                delete e.material._oldOpa
+            })
+            
+            viewports.forEach((e)=>{e.beforeRender = e.oldBeforeRender})  
+            
+			this.changeVisibilityOfNonSelectedObjects( true , scenes);
+            
+            
+            scenes.forEach(scene=>{ 
+                scene.overrideMaterial = null;
+                scene.background = scene.currentBackground 
+            })
+			
+
+			 
+
+			// 3. Apply Edge Detection Pass
+			this.quad.material = this.edgeDetectionMaterial;
+			this.edgeDetectionMaterial.uniforms[ "maskTexture" ].value = this.renderTargetMaskBuffer.texture;//this.renderTargetMaskDownSampleBuffer.texture;
+			this.edgeDetectionMaterial.uniforms[ "texSize" ].value = new THREE.Vector2(this.resolution.x, this.resolution.y )//new THREE.Vector2( this.renderTargetMaskDownSampleBuffer.width, this.renderTargetMaskDownSampleBuffer.height );
+			//this.edgeDetectionMaterial.uniforms[ "texSize" ].value = new THREE.Vector2(this.renderTargetMaskBuffer.width, this.renderTargetMaskBuffer.height)//new THREE.Vector2( this.renderTargetMaskDownSampleBuffer.width, this.renderTargetMaskDownSampleBuffer.height );
+            this.edgeDetectionMaterial.uniforms[ "edgeStrength" ].value = this.edgeStrength;
+            
+            this.edgeDetectionMaterial.uniforms[ "visibleEdgeColor" ].value = this.visibleEdgeColor//this.tempPulseColor1;
+			this.edgeDetectionMaterial.uniforms[ "hiddenEdgeColor" ].value = this.hiddenEdgeColor //this.tempPulseColor2;
+			
+            
+            
+            let buffer = readBuffer
+            if ( this.renderToScreen ) {
+                this.quad.material.transparent = true
+                
+                buffer = null 
+                renderer.setClearColor( this.oldClearColor, this.oldClearAlpha );  
+                render2(null,false)
+                
+                //绘制到全屏
+                let renderSize = renderer.getSize(new THREE.Vector2()); //是client大小
+                renderer.setViewport(0, 0, renderSize.x, renderSize.y) //规定视口,影响图形变换(画布的使用范围) 
+                renderer.setScissorTest( false );
+                  
+            }else{
+                renderer.setClearColor( 0x000000, 0 );
+                renderer.clear()
+            } 
+            
+         
+            renderer.setRenderTarget(buffer/* this.renderTargetEdgeBuffer1 */)
+            
+            renderer.render( this.scene,  this.camera/* , this.renderTargetEdgeBuffer1, true  */);
+           
+
+            //这次删掉
+			/* // Blend it additively over the input texture
+			this.quad.material = this.overlayMaterial;
+			//this.overlayMaterial.uniforms[ "maskTexture" ].value = this.renderTargetMaskBuffer.texture;
+			this.overlayMaterial.uniforms[ "edgeTexture1" ].value = this.renderTargetEdgeBuffer1.texture;
+			//this.overlayMaterial.uniforms[ "edgeTexture2" ].value = this.renderTargetEdgeBuffer2.texture;
+			//this.overlayMaterial.uniforms[ "patternTexture" ].value = this.patternTexture;
+			this.overlayMaterial.uniforms[ "edgeStrength" ].value = this.edgeStrength;
+			//this.overlayMaterial.uniforms[ "edgeGlow" ].value = this.edgeGlow;
+			//this.overlayMaterial.uniforms[ "usePatternTexture" ].value = this.usePatternTexture;
+           
+
+			if ( maskActive ) renderer.context.enable( renderer.context.STENCIL_TEST ); 
+			//renderer.render( this.scene,  camera, readBuffer, false );
+            
+            //改:清空readBuffer, 仅绘制出outline的部分 
+            
+            renderer.setClearColor( 0x000000, 0 );
+            renderer.setRenderTarget(readBuffer)
+            renderer.clear()
+            renderer.render(  this.scene,  this.camera );
+            */
+            renderer.setRenderTarget(oldTarget) 
+			renderer.setClearColor( this.oldClearColor, this.oldClearAlpha );
+			renderer.autoClear = oldAutoClear;
+            return true
+		} 
+
+		/* if ( this.renderToScreen ) {
+
+			this.quad.material = this.materialCopy;
+			this.copyUniforms[ "tDiffuse" ].value = readBuffer.texture;
+			renderer.render(  this.scene ,  this.camera );
+
+		} */
+
+	},
+
+	// Builds the mask-preparation ShaderMaterial. It is used as scene.overrideMaterial
+	// while rendering the selected objects: each fragment projects itself into the
+	// previously rendered depth buffer (depthTexture, via textureMatrix) and writes
+	// the depth-comparison result into the green channel of the mask.
+	getPrepareMaskMaterial: function () {
+
+		return new THREE.ShaderMaterial( {
+
+			uniforms: {
+				"depthTexture": { value: null },
+				"cameraNearFar": { value: new THREE.Vector2( 0.5, 0.5 ) },
+				"textureMatrix": { value: new THREE.Matrix4() }
+			},
+
+			vertexShader: [
+				'varying vec4 projTexCoord;',
+				'varying vec4 vPosition;',
+				'uniform mat4 textureMatrix;',
+
+				'void main() {',
+
+				'	vPosition = modelViewMatrix * vec4( position, 1.0 );',
+				'	vec4 worldPosition = modelMatrix * vec4( position, 1.0 );',
+				'	projTexCoord = textureMatrix * worldPosition;',
+				'	gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
+
+				'}'
+			].join( '\n' ),
+
+			fragmentShader: [
+				'#include <packing>',
+				'varying vec4 vPosition;',
+				'varying vec4 projTexCoord;',
+				'uniform sampler2D depthTexture;',
+				'uniform vec2 cameraNearFar;',
+
+				'void main() {',
+
+				'	float depth = unpackRGBAToDepth(texture2DProj( depthTexture, projTexCoord ));',
+				'	float viewZ = - DEPTH_TO_VIEW_Z( depth, cameraNearFar.x, cameraNearFar.y );',
+				'	float depthTest = (-vPosition.z > viewZ) ? 1.0 : 0.0;',   
+				'	gl_FragColor = vec4(0.0, depthTest, 1.0, 1.0);',
+
+				'}'
+			].join( '\n' )
+            //scene.overrideMaterial
+            // NOTE(review): why is the red channel of the rendered output non-zero,
+            // and why does depthTest appear to change the red channel's value? -- confirm
+		} );
+
+	},
+
+	// Builds the edge-detection ShaderMaterial. It samples the mask texture at
+	// +-1 texel in x and y, derives an edge magnitude from the red-channel deltas
+	// (scaled by the edgeStrength uniform), and colors the edge with
+	// visibleEdgeColor or hiddenEdgeColor based on the green-channel visibility.
+	getEdgeDetectionMaterial: function (edgeStrength) {
+
+		return new THREE.ShaderMaterial( {
+            
+			uniforms: {
+                "edgeStrength": { value: edgeStrength }, 
+				"maskTexture": { value: null },
+				"texSize": { value: new THREE.Vector2( 10, 10 ) },
+				"visibleEdgeColor": { value: new THREE.Vector3( 1.0, 1.0, 1.0 ) },
+				"hiddenEdgeColor": { value: new THREE.Vector3( 1.0, 1.0, 1.0 ) },
+			},
+
+			vertexShader:
+				"varying vec2 vUv;\n\
+				void main() {\n\
+					vUv = uv;\n\
+					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\
+				}",
+
+			/* fragmentShader:
+				"varying vec2 vUv;\
+				uniform sampler2D maskTexture;\
+				uniform vec2 texSize;\
+				uniform vec3 visibleEdgeColor;\
+				uniform vec3 hiddenEdgeColor;\
+				\
+				void main() {\n\
+					vec2 invSize = 1.0 / texSize;\
+					vec4 uvOffset = vec4(1.0, 0.0, 0.0, 1.0) * vec4(invSize, invSize);\
+					vec4 c1 = texture2D( maskTexture, vUv + uvOffset.xy);\
+					vec4 c2 = texture2D( maskTexture, vUv - uvOffset.xy);\
+					vec4 c3 = texture2D( maskTexture, vUv + uvOffset.yw);\
+					vec4 c4 = texture2D( maskTexture, vUv - uvOffset.yw);\
+					float diff1 = (c1.r - c2.r)*0.5;\
+					float diff2 = (c3.r - c4.r)*0.5;\
+					float d = length( vec2(diff1, diff2) );\
+					float a1 = min(c1.g, c2.g);\
+					float a2 = min(c3.g, c4.g);\
+					float visibilityFactor = min(a1, a2);\
+					vec3 edgeColor = 1.0 - visibilityFactor > 0.001 ? visibleEdgeColor : hiddenEdgeColor;\
+					gl_FragColor = vec4(edgeColor, 1.0) * vec4(d);\
+				}" */
+                fragmentShader:
+				`varying vec2 vUv;
+				uniform sampler2D maskTexture;
+                uniform float edgeStrength;
+				uniform vec2 texSize;
+				uniform vec3 visibleEdgeColor;
+				uniform vec3 hiddenEdgeColor;
+				
+				void main() { 
+                    const float thickness = 1.0;
+                    vec2 invSize = thickness / texSize;
+					vec4 uvOffset = vec4(1.0, 0.0, 0.0, 1.0) * vec4(invSize, invSize);
+					vec4 c1 = texture2D( maskTexture, vUv + uvOffset.xy);
+					vec4 c2 = texture2D( maskTexture, vUv - uvOffset.xy);
+					vec4 c3 = texture2D( maskTexture, vUv + uvOffset.yw);
+					vec4 c4 = texture2D( maskTexture, vUv - uvOffset.yw);
+					float diff1 = (c1.r - c2.r)*0.5;  //检测边缘,
+					float diff2 = (c3.r - c4.r)*0.5;
+					float d = length( vec2(diff1, diff2) ) * edgeStrength;
+					float a1 = min(c1.g, c2.g);
+					float a2 = min(c3.g, c4.g);
+					float visibilityFactor = min(a1, a2);
+					vec3 edgeColor = 1.0 - visibilityFactor > 0.001 ? visibleEdgeColor : hiddenEdgeColor;
+					//gl_FragColor = vec4(0.0,1.0,0.0,1.0); 
+                    gl_FragColor = vec4(edgeColor, 1.0) * vec4(d); 
+				}`
+		} );
+        // NOTE(review): why does vec4(0.0,1.0,1.0,1.0) display as rgb(109,255,255)?
+        // Almost only the green channel seems to influence the red channel -- confirm.
+
+	},
+
+	// Builds a separable Gaussian blur ShaderMaterial: blurs colorTexture along
+	// the `direction` uniform with up to MAX_RADIUS taps on each side, weighted
+	// by a Gaussian pdf of sigma = kernelRadius. (Apparently only needed for the
+	// glow path, which is commented out elsewhere in this pass -- confirm.)
+	getSeperableBlurMaterial: function ( maxRadius ) {
+
+		return new THREE.ShaderMaterial( {
+
+			defines: {
+				"MAX_RADIUS": maxRadius,
+			},
+
+			uniforms: {
+				"colorTexture": { value: null },
+				"texSize": { value: new THREE.Vector2( 0.5, 0.5 ) },
+				"direction": { value: new THREE.Vector2( 0.5, 0.5 ) },
+				"kernelRadius": { value: 1.0 }
+			},
+
+			vertexShader:
+				"varying vec2 vUv;\n\
+				void main() {\n\
+					vUv = uv;\n\
+					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\
+				}",
+
+			fragmentShader:
+				"#include <common>\
+				varying vec2 vUv;\
+				uniform sampler2D colorTexture;\
+				uniform vec2 texSize;\
+				uniform vec2 direction;\
+				uniform float kernelRadius;\
+				\
+				float gaussianPdf(in float x, in float sigma) {\
+					return 0.39894 * exp( -0.5 * x * x/( sigma * sigma))/sigma;\
+				}\
+				void main() {\
+					vec2 invSize = 1.0 / texSize;\
+					float weightSum = gaussianPdf(0.0, kernelRadius);\
+					vec3 diffuseSum = texture2D( colorTexture, vUv).rgb * weightSum;\
+					vec2 delta = direction * invSize * kernelRadius/float(MAX_RADIUS);\
+					vec2 uvOffset = delta;\
+					for( int i = 1; i <= MAX_RADIUS; i ++ ) {\
+						float w = gaussianPdf(uvOffset.x, kernelRadius);\
+						vec3 sample1 = texture2D( colorTexture, vUv + uvOffset).rgb;\
+						vec3 sample2 = texture2D( colorTexture, vUv - uvOffset).rgb;\
+						diffuseSum += ((sample1 + sample2) * w);\
+						weightSum += (2.0 * w);\
+						uvOffset += delta;\
+					}\
+					gl_FragColor = vec4(diffuseSum/weightSum, 1.0);\
+				}"  
+               
+		} );
+
+	},
+
+	// Builds the overlay ShaderMaterial that additively composites the edge
+	// texture (scaled by edgeStrength) onto the current framebuffer. The original
+	// multi-texture overlay shader is kept below, commented out, for reference.
+	getOverlayMaterial: function () {
+
+		return new THREE.ShaderMaterial( {
+
+			uniforms: {
+				"maskTexture": { value: null },
+				"edgeTexture1": { value: null },
+				"edgeTexture2": { value: null },
+				"patternTexture": { value: null },
+				"edgeStrength": { value: 1.0 },
+				"edgeGlow": { value: 1.0 },
+				"usePatternTexture": { value: 0.0 }
+			},
+
+			vertexShader:
+				"varying vec2 vUv;\n\
+				void main() {\n\
+					vUv = uv;\n\
+					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\
+				}",
+
+			/* fragmentShader:
+				"varying vec2 vUv;\
+				uniform sampler2D maskTexture;\
+				uniform sampler2D edgeTexture1;\
+				uniform sampler2D edgeTexture2;\
+				uniform sampler2D patternTexture;\
+				uniform float edgeStrength;\
+				uniform float edgeGlow;\
+				uniform bool usePatternTexture;\
+				\
+				void main() {\
+					vec4 edgeValue1 = texture2D(edgeTexture1, vUv);\
+					vec4 edgeValue2 = texture2D(edgeTexture2, vUv);\
+					vec4 maskColor = texture2D(maskTexture, vUv);\
+					vec4 patternColor = texture2D(patternTexture, 6.0 * vUv);\
+					float visibilityFactor = 1.0 - maskColor.g > 0.0 ? 1.0 : 0.5;\
+					vec4 edgeValue = edgeValue1 + edgeValue2 * edgeGlow;\
+					vec4 finalColor = edgeStrength * maskColor.r * edgeValue;\       // removing "* maskColor.r" drops the mask so the model itself is outlined too
+					if(usePatternTexture)\
+						finalColor += + visibilityFactor * (1.0 - maskColor.r) * (1.0 - patternColor.r);\
+					gl_FragColor = finalColor;\
+				}", */
+                fragmentShader:
+              `varying vec2 vUv;   
+				uniform sampler2D edgeTexture1;   
+				uniform float edgeStrength;   
+				 
+				void main() { 
+					gl_FragColor = edgeStrength * texture2D(edgeTexture1, vUv);  
+				}`,
+			blending: THREE.AdditiveBlending,
+			depthTest: false,
+			depthWrite: false,
+			transparent: true
+		} );
+
+	}
+
+} );
+
+OutlinePass.BlurDirectionX = new THREE.Vector2( 1.0, 0.0 );
+OutlinePass.BlurDirectionY = new THREE.Vector2( 0.0, 1.0 );
+
+export default OutlinePass

+ 91 - 0
src/materials/postprocessing/RenderPass.js

@@ -0,0 +1,91 @@
+/**
+ * @author alteredq / http://alteredqualia.com/
+ */
+import * as THREE from "../../../libs/three.js/build/three.module.js";
+import {Pass} from './ShaderPass'
+
+class RenderPass extends Pass {
+
+	constructor( overrideMaterial, clearColor, clearAlpha ) {
+
+		super();
+
+		/* this.scene = scene;
+		this.camera = camera; */
+
+		this.overrideMaterial = overrideMaterial;
+
+		this.clearColor = clearColor;
+		this.clearAlpha = ( clearAlpha !== undefined ) ? clearAlpha : 0;
+
+		this.clear = true;
+		this.clearDepth = false;
+		this.needsSwap = false;
+		this._oldClearColor = new THREE.Color();
+
+	}
+
+	render(scene, camera, renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) {
+
+		const oldAutoClear = renderer.autoClear;
+		renderer.autoClear = false;
+
+		let oldClearAlpha, oldOverrideMaterial;
+
+		if ( this.overrideMaterial !== undefined ) {
+
+			oldOverrideMaterial = this.scene.overrideMaterial;
+
+			 scene.overrideMaterial = this.overrideMaterial;
+
+		}
+
+		if ( this.clearColor ) {
+
+			renderer.getClearColor( this._oldClearColor );
+			oldClearAlpha = renderer.getClearAlpha();
+
+			renderer.setClearColor( this.clearColor, this.clearAlpha );
+
+		}
+
+		if ( this.clearDepth ) {
+
+			renderer.clearDepth();
+
+		}
+
+
+        let oldTarget = renderer.getRenderTarget();
+		if(!this.renderToScreen)renderer.setRenderTarget( readBuffer );
+
+		// TODO: Avoid using autoClear properties, see https://github.com/mrdoob/three.js/pull/15571#issuecomment-465669600
+		if ( this.clear ) renderer.clear( renderer.autoClearColor, renderer.autoClearDepth, renderer.autoClearStencil );
+		renderer.render( scene,  camera );
+
+
+        if(!this.renderToScreen)renderer.setRenderTarget( oldTarget );
+
+
+		if ( this.clearColor ) {
+
+			renderer.setClearColor( this._oldClearColor, oldClearAlpha );
+
+		}
+
+		if ( this.overrideMaterial !== undefined ) {
+
+			 scene.overrideMaterial = oldOverrideMaterial;
+
+		}
+
+		renderer.autoClear = oldAutoClear;
+
+	}
+
+}
+
+ 
+
+
+export default RenderPass

La diferencia del archivo ha sido suprimida porque es demasiado grande
+ 176 - 0
src/materials/postprocessing/SMAAPass.js


+ 462 - 0
src/materials/postprocessing/SMAAShader.js

@@ -0,0 +1,462 @@
+/**
+ * @author mpk / http://polko.me/
+ *
+ * WebGL port of Subpixel Morphological Antialiasing (SMAA) v2.8
+ * Preset: SMAA 1x Medium (with color edge detection)
+ * https://github.com/iryoku/smaa/releases/tag/v2.8
+ */
+
+THREE.SMAAShader = [ {
+
+	defines: {
+
+		"SMAA_THRESHOLD": "0.1"
+
+	},
+
+	uniforms: {
+
+		"tDiffuse":		{ value: null },
+		"resolution":	{ value: new THREE.Vector2( 1 / 1024, 1 / 512 ) }
+
+	},
+
+	vertexShader: [
+
+		"uniform vec2 resolution;",
+
+		"varying vec2 vUv;",
+		"varying vec4 vOffset[ 3 ];",
+
+		"void SMAAEdgeDetectionVS( vec2 texcoord ) {",
+			"vOffset[ 0 ] = texcoord.xyxy + resolution.xyxy * vec4( -1.0, 0.0, 0.0,  1.0 );", // WebGL port note: Changed sign in W component
+			"vOffset[ 1 ] = texcoord.xyxy + resolution.xyxy * vec4(  1.0, 0.0, 0.0, -1.0 );", // WebGL port note: Changed sign in W component
+			"vOffset[ 2 ] = texcoord.xyxy + resolution.xyxy * vec4( -2.0, 0.0, 0.0,  2.0 );", // WebGL port note: Changed sign in W component
+		"}",
+
+		"void main() {",
+
+			"vUv = uv;",
+
+			"SMAAEdgeDetectionVS( vUv );",
+
+			"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
+
+		"}"
+
+	].join("\n"),
+
+	fragmentShader: [
+
+		"uniform sampler2D tDiffuse;",
+
+		"varying vec2 vUv;",
+		"varying vec4 vOffset[ 3 ];",
+
+		"vec4 SMAAColorEdgeDetectionPS( vec2 texcoord, vec4 offset[3], sampler2D colorTex ) {",
+			"vec2 threshold = vec2( SMAA_THRESHOLD, SMAA_THRESHOLD );",
+
+			// Calculate color deltas:
+			"vec4 delta;",
+			"vec3 C = texture2D( colorTex, texcoord ).rgb;",
+
+			"vec3 Cleft = texture2D( colorTex, offset[0].xy ).rgb;",
+			"vec3 t = abs( C - Cleft );",
+			"delta.x = max( max( t.r, t.g ), t.b );",
+
+			"vec3 Ctop = texture2D( colorTex, offset[0].zw ).rgb;",
+			"t = abs( C - Ctop );",
+			"delta.y = max( max( t.r, t.g ), t.b );",
+
+			// We do the usual threshold:
+			"vec2 edges = step( threshold, delta.xy );",
+
+			// Then discard if there is no edge:
+			"if ( dot( edges, vec2( 1.0, 1.0 ) ) == 0.0 )",
+				"discard;",
+
+			// Calculate right and bottom deltas:
+			"vec3 Cright = texture2D( colorTex, offset[1].xy ).rgb;",
+			"t = abs( C - Cright );",
+			"delta.z = max( max( t.r, t.g ), t.b );",
+
+			"vec3 Cbottom  = texture2D( colorTex, offset[1].zw ).rgb;",
+			"t = abs( C - Cbottom );",
+			"delta.w = max( max( t.r, t.g ), t.b );",
+
+			// Calculate the maximum delta in the direct neighborhood:
+			"float maxDelta = max( max( max( delta.x, delta.y ), delta.z ), delta.w );",
+
+			// Calculate left-left and top-top deltas:
+			"vec3 Cleftleft  = texture2D( colorTex, offset[2].xy ).rgb;",
+			"t = abs( C - Cleftleft );",
+			"delta.z = max( max( t.r, t.g ), t.b );",
+
+			"vec3 Ctoptop = texture2D( colorTex, offset[2].zw ).rgb;",
+			"t = abs( C - Ctoptop );",
+			"delta.w = max( max( t.r, t.g ), t.b );",
+
+			// Calculate the final maximum delta:
+			"maxDelta = max( max( maxDelta, delta.z ), delta.w );",
+
+			// Local contrast adaptation in action:
+			"edges.xy *= step( 0.5 * maxDelta, delta.xy );",
+
+			"return vec4( edges, 0.0, 0.0 );",
+		"}",
+
+		"void main() {",
+
+			"gl_FragColor = SMAAColorEdgeDetectionPS( vUv, vOffset, tDiffuse );",
+
+		"}"
+
+	].join("\n")
+
+}, {
+
+	defines: {
+
+		"SMAA_MAX_SEARCH_STEPS":		"8",
+		"SMAA_AREATEX_MAX_DISTANCE":	"16",
+		"SMAA_AREATEX_PIXEL_SIZE":		"( 1.0 / vec2( 160.0, 560.0 ) )",
+		"SMAA_AREATEX_SUBTEX_SIZE":		"( 1.0 / 7.0 )"
+
+	},
+
+	uniforms: {
+
+		"tDiffuse":		{ value: null },
+		"tArea":		{ value: null },
+		"tSearch":		{ value: null },
+		"resolution":	{ value: new THREE.Vector2( 1 / 1024, 1 / 512 ) }
+
+	},
+
+	vertexShader: [
+
+		"uniform vec2 resolution;",
+
+		"varying vec2 vUv;",
+		"varying vec4 vOffset[ 3 ];",
+		"varying vec2 vPixcoord;",
+
+		"void SMAABlendingWeightCalculationVS( vec2 texcoord ) {",
+			"vPixcoord = texcoord / resolution;",
+
+			// We will use these offsets for the searches later on (see @PSEUDO_GATHER4):
+			"vOffset[ 0 ] = texcoord.xyxy + resolution.xyxy * vec4( -0.25, 0.125, 1.25, 0.125 );", // WebGL port note: Changed sign in Y and W components
+			"vOffset[ 1 ] = texcoord.xyxy + resolution.xyxy * vec4( -0.125, 0.25, -0.125, -1.25 );", // WebGL port note: Changed sign in Y and W components
+
+			// And these for the searches, they indicate the ends of the loops:
+			"vOffset[ 2 ] = vec4( vOffset[ 0 ].xz, vOffset[ 1 ].yw ) + vec4( -2.0, 2.0, -2.0, 2.0 ) * resolution.xxyy * float( SMAA_MAX_SEARCH_STEPS );",
+
+		"}",
+
+		"void main() {",
+
+			"vUv = uv;",
+
+			"SMAABlendingWeightCalculationVS( vUv );",
+
+			"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
+
+		"}"
+
+	].join("\n"),
+
+	fragmentShader: [
+
+		"#define SMAASampleLevelZeroOffset( tex, coord, offset ) texture2D( tex, coord + float( offset ) * resolution, 0.0 )",
+
+		"uniform sampler2D tDiffuse;",
+		"uniform sampler2D tArea;",
+		"uniform sampler2D tSearch;",
+		"uniform vec2 resolution;",
+
+		"varying vec2 vUv;",
+		"varying vec4 vOffset[3];",
+		"varying vec2 vPixcoord;",
+
+		"vec2 round( vec2 x ) {",
+			"return sign( x ) * floor( abs( x ) + 0.5 );",
+		"}",
+
+		"float SMAASearchLength( sampler2D searchTex, vec2 e, float bias, float scale ) {",
+			// Not required if searchTex accesses are set to point:
+			// float2 SEARCH_TEX_PIXEL_SIZE = 1.0 / float2(66.0, 33.0);
+			// e = float2(bias, 0.0) + 0.5 * SEARCH_TEX_PIXEL_SIZE +
+			//     e * float2(scale, 1.0) * float2(64.0, 32.0) * SEARCH_TEX_PIXEL_SIZE;
+			"e.r = bias + e.r * scale;",
+			"return 255.0 * texture2D( searchTex, e, 0.0 ).r;",
+		"}",
+
+		"float SMAASearchXLeft( sampler2D edgesTex, sampler2D searchTex, vec2 texcoord, float end ) {",
+			/**
+			* @PSEUDO_GATHER4
+			* This texcoord has been offset by (-0.25, -0.125) in the vertex shader to
+			* sample between edge, thus fetching four edges in a row.
+			* Sampling with different offsets in each direction allows to disambiguate
+			* which edges are active from the four fetched ones.
+			*/
+			"vec2 e = vec2( 0.0, 1.0 );",
+
+			"for ( int i = 0; i < SMAA_MAX_SEARCH_STEPS; i ++ ) {", // WebGL port note: Changed while to for
+				"e = texture2D( edgesTex, texcoord, 0.0 ).rg;",
+				"texcoord -= vec2( 2.0, 0.0 ) * resolution;",
+				"if ( ! ( texcoord.x > end && e.g > 0.8281 && e.r == 0.0 ) ) break;",
+			"}",
+
+			// We correct the previous (-0.25, -0.125) offset we applied:
+			"texcoord.x += 0.25 * resolution.x;",
+
+			// The searches are bias by 1, so adjust the coords accordingly:
+			"texcoord.x += resolution.x;",
+
+			// Disambiguate the length added by the last step:
+			"texcoord.x += 2.0 * resolution.x;", // Undo last step
+			"texcoord.x -= resolution.x * SMAASearchLength(searchTex, e, 0.0, 0.5);",
+
+			"return texcoord.x;",
+		"}",
+
+		"float SMAASearchXRight( sampler2D edgesTex, sampler2D searchTex, vec2 texcoord, float end ) {",
+			"vec2 e = vec2( 0.0, 1.0 );",
+
+			"for ( int i = 0; i < SMAA_MAX_SEARCH_STEPS; i ++ ) {", // WebGL port note: Changed while to for
+				"e = texture2D( edgesTex, texcoord, 0.0 ).rg;",
+				"texcoord += vec2( 2.0, 0.0 ) * resolution;",
+				"if ( ! ( texcoord.x < end && e.g > 0.8281 && e.r == 0.0 ) ) break;",
+			"}",
+
+			"texcoord.x -= 0.25 * resolution.x;",
+			"texcoord.x -= resolution.x;",
+			"texcoord.x -= 2.0 * resolution.x;",
+			"texcoord.x += resolution.x * SMAASearchLength( searchTex, e, 0.5, 0.5 );",
+
+			"return texcoord.x;",
+		"}",
+
+		"float SMAASearchYUp( sampler2D edgesTex, sampler2D searchTex, vec2 texcoord, float end ) {",
+			"vec2 e = vec2( 1.0, 0.0 );",
+
+			"for ( int i = 0; i < SMAA_MAX_SEARCH_STEPS; i ++ ) {", // WebGL port note: Changed while to for
+				"e = texture2D( edgesTex, texcoord, 0.0 ).rg;",
+				"texcoord += vec2( 0.0, 2.0 ) * resolution;", // WebGL port note: Changed sign
+				"if ( ! ( texcoord.y > end && e.r > 0.8281 && e.g == 0.0 ) ) break;",
+			"}",
+
+			"texcoord.y -= 0.25 * resolution.y;", // WebGL port note: Changed sign
+			"texcoord.y -= resolution.y;", // WebGL port note: Changed sign
+			"texcoord.y -= 2.0 * resolution.y;", // WebGL port note: Changed sign
+			"texcoord.y += resolution.y * SMAASearchLength( searchTex, e.gr, 0.0, 0.5 );", // WebGL port note: Changed sign
+
+			"return texcoord.y;",
+		"}",
+
+		"float SMAASearchYDown( sampler2D edgesTex, sampler2D searchTex, vec2 texcoord, float end ) {",
+			"vec2 e = vec2( 1.0, 0.0 );",
+
+			"for ( int i = 0; i < SMAA_MAX_SEARCH_STEPS; i ++ ) {", // WebGL port note: Changed while to for
+				"e = texture2D( edgesTex, texcoord, 0.0 ).rg;",
+				"texcoord -= vec2( 0.0, 2.0 ) * resolution;", // WebGL port note: Changed sign
+				"if ( ! ( texcoord.y < end && e.r > 0.8281 && e.g == 0.0 ) ) break;",
+			"}",
+
+			"texcoord.y += 0.25 * resolution.y;", // WebGL port note: Changed sign
+			"texcoord.y += resolution.y;", // WebGL port note: Changed sign
+			"texcoord.y += 2.0 * resolution.y;", // WebGL port note: Changed sign
+			"texcoord.y -= resolution.y * SMAASearchLength( searchTex, e.gr, 0.5, 0.5 );", // WebGL port note: Changed sign
+
+			"return texcoord.y;",
+		"}",
+
+		"vec2 SMAAArea( sampler2D areaTex, vec2 dist, float e1, float e2, float offset ) {",
+			// Rounding prevents precision errors of bilinear filtering:
+			"vec2 texcoord = float( SMAA_AREATEX_MAX_DISTANCE ) * round( 4.0 * vec2( e1, e2 ) ) + dist;",
+
+			// We do a scale and bias for mapping to texel space:
+			"texcoord = SMAA_AREATEX_PIXEL_SIZE * texcoord + ( 0.5 * SMAA_AREATEX_PIXEL_SIZE );",
+
+			// Move to proper place, according to the subpixel offset:
+			"texcoord.y += SMAA_AREATEX_SUBTEX_SIZE * offset;",
+
+			"return texture2D( areaTex, texcoord, 0.0 ).rg;",
+		"}",
+
+		"vec4 SMAABlendingWeightCalculationPS( vec2 texcoord, vec2 pixcoord, vec4 offset[ 3 ], sampler2D edgesTex, sampler2D areaTex, sampler2D searchTex, ivec4 subsampleIndices ) {",
+			"vec4 weights = vec4( 0.0, 0.0, 0.0, 0.0 );",
+
+			"vec2 e = texture2D( edgesTex, texcoord ).rg;",
+
+			"if ( e.g > 0.0 ) {", // Edge at north
+				"vec2 d;",
+
+				// Find the distance to the left:
+				"vec2 coords;",
+				"coords.x = SMAASearchXLeft( edgesTex, searchTex, offset[ 0 ].xy, offset[ 2 ].x );",
+				"coords.y = offset[ 1 ].y;", // offset[1].y = texcoord.y - 0.25 * resolution.y (@CROSSING_OFFSET)
+				"d.x = coords.x;",
+
+				// Now fetch the left crossing edges, two at a time using bilinear
+				// filtering. Sampling at -0.25 (see @CROSSING_OFFSET) enables to
+				// discern what value each edge has:
+				"float e1 = texture2D( edgesTex, coords, 0.0 ).r;",
+
+				// Find the distance to the right:
+				"coords.x = SMAASearchXRight( edgesTex, searchTex, offset[ 0 ].zw, offset[ 2 ].y );",
+				"d.y = coords.x;",
+
+				// We want the distances to be in pixel units (doing this here allow to
+				// better interleave arithmetic and memory accesses):
+				"d = d / resolution.x - pixcoord.x;",
+
+				// SMAAArea below needs a sqrt, as the areas texture is compressed
+				// quadratically:
+				"vec2 sqrt_d = sqrt( abs( d ) );",
+
+				// Fetch the right crossing edges:
+				"coords.y -= 1.0 * resolution.y;", // WebGL port note: Added
+				"float e2 = SMAASampleLevelZeroOffset( edgesTex, coords, ivec2( 1, 0 ) ).r;",
+
+				// Ok, we know how this pattern looks like, now it is time for getting
+				// the actual area:
+				"weights.rg = SMAAArea( areaTex, sqrt_d, e1, e2, float( subsampleIndices.y ) );",
+			"}",
+
+			"if ( e.r > 0.0 ) {", // Edge at west
+				"vec2 d;",
+
+				// Find the distance to the top:
+				"vec2 coords;",
+
+				"coords.y = SMAASearchYUp( edgesTex, searchTex, offset[ 1 ].xy, offset[ 2 ].z );",
+				"coords.x = offset[ 0 ].x;", // offset[1].x = texcoord.x - 0.25 * resolution.x;
+				"d.x = coords.y;",
+
+				// Fetch the top crossing edges:
+				"float e1 = texture2D( edgesTex, coords, 0.0 ).g;",
+
+				// Find the distance to the bottom:
+				"coords.y = SMAASearchYDown( edgesTex, searchTex, offset[ 1 ].zw, offset[ 2 ].w );",
+				"d.y = coords.y;",
+
+				// We want the distances to be in pixel units:
+				"d = d / resolution.y - pixcoord.y;",
+
+				// SMAAArea below needs a sqrt, as the areas texture is compressed
+				// quadratically:
+				"vec2 sqrt_d = sqrt( abs( d ) );",
+
+				// Fetch the bottom crossing edges:
+				"coords.y -= 1.0 * resolution.y;", // WebGL port note: Added
+				"float e2 = SMAASampleLevelZeroOffset( edgesTex, coords, ivec2( 0, 1 ) ).g;",
+
+				// Get the area for this direction:
+				"weights.ba = SMAAArea( areaTex, sqrt_d, e1, e2, float( subsampleIndices.x ) );",
+			"}",
+
+			"return weights;",
+		"}",
+
+		"void main() {",
+
+			"gl_FragColor = SMAABlendingWeightCalculationPS( vUv, vPixcoord, vOffset, tDiffuse, tArea, tSearch, ivec4( 0.0 ) );",
+
+		"}"
+
+	].join("\n")
+
+}, {
+
+	uniforms: {
+
+		"tDiffuse":		{ value: null },
+		"tColor":		{ value: null },
+		"resolution":	{ value: new THREE.Vector2( 1 / 1024, 1 / 512 ) }
+
+	},
+
+	vertexShader: [
+
+		"uniform vec2 resolution;",
+
+		"varying vec2 vUv;",
+		"varying vec4 vOffset[ 2 ];",
+
+		"void SMAANeighborhoodBlendingVS( vec2 texcoord ) {",
+			"vOffset[ 0 ] = texcoord.xyxy + resolution.xyxy * vec4( -1.0, 0.0, 0.0, 1.0 );", // WebGL port note: Changed sign in W component
+			"vOffset[ 1 ] = texcoord.xyxy + resolution.xyxy * vec4( 1.0, 0.0, 0.0, -1.0 );", // WebGL port note: Changed sign in W component
+		"}",
+
+		"void main() {",
+
+			"vUv = uv;",
+
+			"SMAANeighborhoodBlendingVS( vUv );",
+
+			"gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );",
+
+		"}"
+
+	].join("\n"),
+
+	fragmentShader: [
+
+		"uniform sampler2D tDiffuse;",
+		"uniform sampler2D tColor;",
+		"uniform vec2 resolution;",
+
+		"varying vec2 vUv;",
+		"varying vec4 vOffset[ 2 ];",
+
+		"vec4 SMAANeighborhoodBlendingPS( vec2 texcoord, vec4 offset[ 2 ], sampler2D colorTex, sampler2D blendTex ) {",
+			// Fetch the blending weights for current pixel:
+			"vec4 a;",
+			"a.xz = texture2D( blendTex, texcoord ).xz;",
+			"a.y = texture2D( blendTex, offset[ 1 ].zw ).g;",
+			"a.w = texture2D( blendTex, offset[ 1 ].xy ).a;",
+
+			// Is there any blending weight with a value greater than 0.0?
+			"if ( dot(a, vec4( 1.0, 1.0, 1.0, 1.0 )) < 1e-5 ) {",
+				"return texture2D( colorTex, texcoord, 0.0 );",
+			"} else {",
+				// Up to 4 lines can be crossing a pixel (one through each edge). We
+				// favor blending by choosing the line with the maximum weight for each
+				// direction:
+				"vec2 offset;",
+				"offset.x = a.a > a.b ? a.a : -a.b;", // left vs. right
+				"offset.y = a.g > a.r ? -a.g : a.r;", // top vs. bottom // WebGL port note: Changed signs
+
+				// Then we go in the direction that has the maximum weight:
+				"if ( abs( offset.x ) > abs( offset.y )) {", // horizontal vs. vertical
+					"offset.y = 0.0;",
+				"} else {",
+					"offset.x = 0.0;",
+				"}",
+
+				// Fetch the opposite color and lerp by hand:
+				"vec4 C = texture2D( colorTex, texcoord, 0.0 );",
+				"texcoord += sign( offset ) * resolution;",
+				"vec4 Cop = texture2D( colorTex, texcoord, 0.0 );",
+				"float s = abs( offset.x ) > abs( offset.y ) ? abs( offset.x ) : abs( offset.y );",
+
+				// WebGL port note: Added gamma correction
+				"C.xyz = pow(C.xyz, vec3(2.2));",
+				"Cop.xyz = pow(Cop.xyz, vec3(2.2));",
+				"vec4 mixed = mix(C, Cop, s);",
+				"mixed.xyz = pow(mixed.xyz, vec3(1.0 / 2.2));",
+
+				"return mixed;",
+			"}",
+		"}",
+
+		"void main() {",
+
+			"gl_FragColor = SMAANeighborhoodBlendingPS( vUv, vOffset, tColor, tDiffuse );",
+
+		"}"
+
+	].join("\n")
+
+} ];

+ 317 - 0
src/materials/postprocessing/SSAARenderPass.js

@@ -0,0 +1,317 @@
+/**
+*
+* Supersample Anti-Aliasing Render Pass
+*
+* @author bhouston / http://clara.io/
+*
+* This manual approach to SSAA re-renders the scene once for each sample with camera jitter and accumulates the results.
+*
+* References: https://en.wikipedia.org/wiki/Supersampling
+*
+*/
+
+
+
+
+
+
+
+
+import * as THREE from "../../../libs/three.js/build/three.module.js";
+import {Pass, ShaderPass} from './ShaderPass'
+import CopyShader from './CopyShader'
+
+
+//较为原始的一种抗锯齿 (超级采样抗锯齿)
+let SSAARenderPass = function ( clearColor, clearAlpha ) {
+
+	Pass.call( this );
+
+	//this.scene //= scene;
+	//this.camera = camera;
+
+	this.sampleLevel = 4; // specified as n, where the number of samples is 2^n, so sampleLevel = 4, is 2^4 samples, 16.
+	this.unbiased = true;
+
+	// as we need to clear the buffer in this pass, clearColor must be set to something, defaults to black.
+	this.clearColor = ( clearColor !== undefined ) ? clearColor : 0x000000;
+	this.clearAlpha = ( clearAlpha !== undefined ) ? clearAlpha : 0;
+ 
+
+    this.renderUniforms =   { 
+        bgTex : {value:null},
+        outlineTex : {value:null},
+        opacity :  {value:1},
+    }
+       
+    this.renderMat = new THREE.ShaderMaterial({
+        uniforms: this.renderUniforms,
+		vertexShader: CopyShader.vertexShader,  
+        /* fragmentShader: CopyShader.fragmentShader, */
+        fragmentShader: ` 
+            uniform sampler2D bgTex;  
+            uniform sampler2D outlineTex; 
+            uniform float opacity;
+            varying vec2 vUv;
+            void main() {
+                vec4 color1 = texture2D( bgTex, vUv );
+                vec4 color2 = texture2D( outlineTex, vUv );  
+                gl_FragColor = opacity * mix(color1, color2, color2.a)  ;
+                 
+                
+            } 
+        `, 
+        premultipliedAlpha: true,
+		blending: THREE.AdditiveBlending,
+		depthTest: false,
+		depthWrite: false,
+		transparent: true
+    })
+    
+    
+    this.renderMat2 =  new THREE.ShaderMaterial({
+        uniforms: THREE.UniformsUtils.clone(CopyShader.uniforms) ,
+		vertexShader: CopyShader.vertexShader,  
+        fragmentShader:`uniform float opacity; 
+            uniform sampler2D tDiffuse; 
+            varying vec2 vUv;
+
+            void main() {
+     
+                vec4 texel = texture2D( tDiffuse, vUv );  
+                
+                if(texel.r == 0.0 && texel.g == 0.0 && texel.b == 0.0){
+                    discard;
+                }else{
+                    gl_FragColor = opacity * texel;
+                }
+            }    
+        ` ,
+        
+         
+		depthTest: false,
+		depthWrite: false,
+		transparent: true
+    })
+    
+    
+    
+    
+    
+    
+    ////////////////////
+   /*  this.renderMat.blendSrc = THREE.OneFactor //即将写入缓冲区的颜色。
+	this.renderMat.blendDst = THREE.OneFactor //缓冲区已经存在的颜色
+    this.renderMat.blendEquation = THREE.AddEquation; 
+    this.renderMat.blendEquationAlpha = THREE.AddEquation;
+    this.renderMat.blendDstAlpha = THREE.SrcAlphaFactor 
+    this.renderMat.blendSrcAlpha = THREE.SrcAlphaFactor  */
+
+	this.camera2 = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 0, 1 );
+	this.scene2	= new THREE.Scene();
+	this.quad2 = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2, 2 ), this.renderMat/*  this.copyMaterial */ );
+	this.quad2.frustumCulled = false; // Avoid getting clipped
+	this.scene2.add( this.quad2 );
+
+
+    this.copyPass = new ShaderPass( CopyShader );
+    this.copyPass.renderToScreen = true
+};
+
+
+
+SSAARenderPass.prototype = Object.assign( Object.create( Pass.prototype ), {
+
+	constructor: SSAARenderPass,
+
+	dispose: function () {
+
+		if ( this.sampleRenderTarget ) {
+
+			this.sampleRenderTarget.dispose();
+			this.sampleRenderTarget = null;
+
+		}
+
+	},
+
+	setSize: function ( width, height ) {
+
+		if ( this.sampleRenderTarget )	this.sampleRenderTarget.setSize( width, height );
+        this.childPass && this.childPass.setSize(width, height)
+        
+	},
+    addPass: function (pass){ 
+        this.childPass = pass;
+    },
+	render: function (scene, camera, viewports, renderer, writeBuffer, readBuffer, maskActive, renderFun ) {
+        if(this.useCopy ){
+            scene = this.copyPass.scene; camera = this.copyPass.camera;
+        }
+		if ( ! this.sampleRenderTarget ) {
+
+			this.sampleRenderTarget = new THREE.WebGLRenderTarget( readBuffer.width, readBuffer.height, { minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBAFormat } );
+			this.sampleRenderTarget.texture.name = "SSAARenderPass.sample";
+
+		}
+
+		var jitterOffsets = SSAARenderPass.JitterVectors[ Math.max( 0, Math.min( this.sampleLevel, 5 ) ) ];
+
+		var autoClear = renderer.autoClear;
+		renderer.autoClear = false;
+
+		var oldClearColor = renderer.getClearColor(new THREE.Color).getHex();
+		var oldClearAlpha = renderer.getClearAlpha();
+        renderer.setClearColor( this.clearColor, this.clearAlpha );
+        
+		var baseSampleWeight = 1.0 / jitterOffsets.length;
+		var roundingRange = 1 / 32;
+		//this.copyUniforms[ "tDiffuse" ].value = this.sampleRenderTarget.texture;
+ 
+        let oldTarget = renderer.getRenderTarget();
+        
+        if(oldTarget){
+            if(oldTarget.scissorTest){
+                var width = oldTarget.scissor.w, height = oldTarget.scissor.z
+            }else{
+                var width = oldTarget.width, height = oldTarget.height; 
+            } 
+        }else{ 
+            var width = readBuffer.width, height = readBuffer.height;
+        }
+        
+		// render the scene multiple times, each slightly jitter offset from the last and accumulate the results.
+		
+        let opa = 0
+        for ( var i = 0; i < jitterOffsets.length; i ++ ) {
+
+			var jitterOffset = jitterOffsets[ i ];
+
+			if ( camera.setViewOffset ) {
+
+				camera.setViewOffset( width, height,
+					jitterOffset[ 0 ] * 0.0625  , jitterOffset[ 1 ] * 0.0625  ,   // 0.0625 = 1 / 16
+					width, height );
+
+			}
+
+			var sampleWeight = baseSampleWeight;
+
+			if ( this.unbiased ) {//更柔和  
+				var uniformCenteredDistribution = ( - 0.5 + ( i + 0.5 ) / jitterOffsets.length );
+				sampleWeight += roundingRange * uniformCenteredDistribution; 
+			}
+ 
+            renderer.setRenderTarget(this.sampleRenderTarget)  
+            renderer.clear()
+            if(this.useCopy){
+                this.copyPass.render(scene,camera, renderer, writeBuffer, readBuffer )
+            }else{
+                if(renderFun){
+                    renderFun({target : this.sampleRenderTarget})
+                }else{
+                    renderer.render( scene, camera );
+                }
+            }
+            renderer.setRenderTarget(oldTarget)
+            
+            //--------------------- 
+            //获取outline tex   
+            let hasOutline = this.childPass && this.childPass.render(scene, camera, renderer, writeBuffer, readBuffer, null, renderFun )
+            
+            
+            //合成到该材质 
+            this.renderUniforms[ "bgTex" ].value = this.sampleRenderTarget.texture;
+			this.renderUniforms[ "outlineTex" ].value = hasOutline ? readBuffer.texture : null
+            this.renderUniforms[ "opacity" ].value = sampleWeight;
+            
+            
+            
+            /* console.log('sampleWeight', sampleWeight)
+            opa +=  sampleWeight  */
+            
+            if(!this.renderToScreen){
+                renderer.setRenderTarget(writeBuffer)
+            } 
+            if(i === 0 ){ 
+                renderer.setClearColor( 0x000000, 0 ); //叠加前颜色必须0
+                renderer.clear()
+            } 
+			renderer.render( this.scene2, this.camera2);  // , this.renderToScreen ? null : writeBuffer, ( i === 0 )   
+            if(!this.renderToScreen){
+                renderer.setRenderTarget(oldTarget)
+            } 
+
+
+            //if(i==2)break;
+		}
+        //console.log('sum:',opa)
+		if ( camera.clearViewOffset )camera.clearViewOffset();
+
+        //renderer.setRenderTarget(readBuffer)
+        //renderer.setClearColor( 0x000000, 0 );
+        //renderer.clear()
+        /* this.quad2.material = this.renderMat2
+        this.renderMat2.uniforms.tDiffuse.value = writeBuffer.texture; 
+        renderer.render( this.scene2, this.camera2);
+        this.quad2.material = this.renderMat    */
+        //renderer.setRenderTarget(oldTarget)
+        
+        
+        
+		renderer.autoClear = autoClear;
+		renderer.setClearColor( oldClearColor, oldClearAlpha );
+        
+        
+        
+        
+        /* 试了好几次,测量线的透明度还是还原不了。 clearAlpha十分影响结果。  
+        因为绘制测量线需要背景透明。  或许可以先全部绘制完后,再 copyshader中 抗锯齿?
+          
+          
+          另外会有黑边。
+        */
+	}
+
+} );
+
+
+// These jitter vectors are specified in integers because it is easier.
+// I am assuming a [-8,8) integer grid, but it needs to be mapped onto [-0.5,0.5)
+// before being used, thus these integers need to be scaled by 1/16.
+//
+// Sample patterns reference: https://msdn.microsoft.com/en-us/library/windows/desktop/ff476218%28v=vs.85%29.aspx?f=255&MSPPError=-2147217396
+SSAARenderPass.JitterVectors = [
+	[
+		[ 0, 0 ]
+	],
+	[
+		[ 4, 4 ], [ - 4, - 4 ]
+	],
+	[
+		[ - 2, - 6 ], [ 6, - 2 ], [ - 6, 2 ], [ 2, 6 ]
+	],
+	[
+		[ 1, - 3 ], [ - 1, 3 ], [ 5, 1 ], [ - 3, - 5 ],
+		[ - 5, 5 ], [ - 7, - 1 ], [ 3, 7 ], [ 7, - 7 ]
+	],
+	[
+		[ 1, 1 ], [ - 1, - 3 ], [ - 3, 2 ], [ 4, - 1 ],
+		[ - 5, - 2 ], [ 2, 5 ], [ 5, 3 ], [ 3, - 5 ],
+		[ - 2, 6 ], [ 0, - 7 ], [ - 4, - 6 ], [ - 6, 4 ],
+		[ - 8, 0 ], [ 7, - 4 ], [ 6, 7 ], [ - 7, - 8 ]
+	],
+	[
+		[ - 4, - 7 ], [ - 7, - 5 ], [ - 3, - 5 ], [ - 5, - 4 ],
+		[ - 1, - 4 ], [ - 2, - 2 ], [ - 6, - 1 ], [ - 4, 0 ],
+		[ - 7, 1 ], [ - 1, 2 ], [ - 6, 3 ], [ - 3, 3 ],
+		[ - 7, 6 ], [ - 3, 6 ], [ - 5, 7 ], [ - 1, 7 ],
+		[ 5, - 7 ], [ 1, - 6 ], [ 6, - 5 ], [ 4, - 4 ],
+		[ 2, - 3 ], [ 7, - 2 ], [ 1, - 1 ], [ 4, - 1 ],
+		[ 2, 1 ], [ 6, 2 ], [ 0, 4 ], [ 4, 4 ],
+		[ 2, 5 ], [ 7, 5 ], [ 5, 6 ], [ 3, 7 ]
+	]
+];
+//锯齿效果见 https://threejs.org/examples/?q=ssaa#webgl_postprocessing_ssaa
+
+export default SSAARenderPass

+ 110 - 0
src/materials/postprocessing/ShaderPass.js

@@ -0,0 +1,110 @@
+/**
+ * @author alteredq / http://alteredqualia.com/
+ */
+import * as THREE from "../../../libs/three.js/build/three.module.js";
+ 
+
+let Pass = function () {
+
+	// if set to true, the pass is processed by the composer
+	this.enabled = true;
+
+	// if set to true, the pass indicates to swap read and write buffer after rendering
+	this.needsSwap = true;
+
+	// if set to true, the pass clears its buffer before rendering
+	this.clear = false;
+
+	// if set to true, the result of the pass is rendered to screen
+	this.renderToScreen = false;
+
+};
+
+Object.assign( Pass.prototype, {
+
+	setSize: function ( width, height ) {},
+
+	render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) {
+
+		console.error( 'THREE.Pass: .render() must be implemented in derived pass.' );
+
+	}
+
+} );
+
+
+let ShaderPass = function ( shader, textureID ) {
+
+	Pass.call( this );
+
+	this.textureID = ( textureID !== undefined ) ? textureID : "tDiffuse";
+
+	if ( shader instanceof THREE.ShaderMaterial ) {
+
+		this.uniforms = shader.uniforms;
+
+		this.material = shader;
+
+	} else if ( shader ) {
+
+		this.uniforms = THREE.UniformsUtils.clone( shader.uniforms );
+
+		this.material = new THREE.ShaderMaterial( {
+
+			defines: Object.assign( {}, shader.defines ),
+			uniforms: this.uniforms,
+			vertexShader: shader.vertexShader,
+			fragmentShader: shader.fragmentShader,
+            transparent:true,//add 
+             
+		} );
+
+	}
+
+	this.camera = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 0, 1 );
+	this.scene = new THREE.Scene();
+
+	this.quad = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2, 2 ), null );
+	this.quad.frustumCulled = false; // Avoid getting clipped
+	this.scene.add( this.quad );
+
+};
+
+ShaderPass.prototype = Object.assign( Object.create(  Pass.prototype ), {
+
+	constructor: ShaderPass,
+
+	render: function(scene,camera, renderer, writeBuffer, readBuffer, delta, maskActive ) {
+        let oldTarget = renderer.getRenderTarget();
+        /* if(this.readTarget){ //add
+            readBuffer = oldTarget
+        } */
+
+
+		if ( this.uniforms[ this.textureID ] ) {
+
+			this.uniforms[ this.textureID ].value = readBuffer.texture;
+
+		}
+
+		this.quad.material = this.material;
+       
+		if ( this.renderToScreen ) {
+             
+			renderer.render( this.scene, this.camera );
+
+		} else {
+            renderer.setRenderTarget(writeBuffer)
+            if(this.clear) renderer.clear()
+			renderer.render( this.scene, this.camera );
+            renderer.setRenderTarget(oldTarget) 
+            
+            
+		}
+
+	}
+
+} );
+
+
+export {Pass, ShaderPass}

+ 131 - 0
src/materials/postprocessing/TAARenderPass.js

@@ -0,0 +1,131 @@
+/**
+ *
+ * Temporal Anti-Aliasing Render Pass
+ *
+ * @author bhouston / http://clara.io/
+ *
+ * When there is no motion in the scene, the TAA render pass accumulates jittered camera samples across frames to create a high quality anti-aliased result.
+ *
+ * References:
+ *
+ * TODO: Add support for a motion vector pass so that accumulation of samples across frames can occur on dynamic scenes.
+ *
+ */
+
+THREE.TAARenderPass = function ( scene, camera, params ) {
+
+	if ( THREE.SSAARenderPass === undefined ) {
+
+		console.error( "THREE.TAARenderPass relies on THREE.SSAARenderPass" );
+
+	}
+
+	THREE.SSAARenderPass.call( this, scene, camera, params );
+
+	this.sampleLevel = 0;
+	this.accumulate = true//false;
+
+};
+
+THREE.TAARenderPass.JitterVectors = THREE.SSAARenderPass.JitterVectors;
+
+THREE.TAARenderPass.prototype = Object.assign( Object.create( THREE.SSAARenderPass.prototype ), {
+
+	constructor: THREE.TAARenderPass,
+
+	render: function ( renderer, writeBuffer, readBuffer, delta ) {
+
+		if ( ! this.accumulate ) {
+
+			THREE.SSAARenderPass.prototype.render.call( this, renderer, writeBuffer, readBuffer, delta );
+
+			this.accumulateIndex = - 1;
+			return;
+
+		}
+
+		var jitterOffsets = THREE.TAARenderPass.JitterVectors[ 5 ];
+
+		if ( ! this.sampleRenderTarget ) {
+
+			this.sampleRenderTarget = new THREE.WebGLRenderTarget( readBuffer.width, readBuffer.height, this.params );
+			this.sampleRenderTarget.texture.name = "TAARenderPass.sample";
+
+		}
+
+		if ( ! this.holdRenderTarget ) {
+
+			this.holdRenderTarget = new THREE.WebGLRenderTarget( readBuffer.width, readBuffer.height, this.params );
+			this.holdRenderTarget.texture.name = "TAARenderPass.hold";
+
+		}
+
+		if ( this.accumulate && this.accumulateIndex === - 1 ) {
+
+			THREE.SSAARenderPass.prototype.render.call( this, renderer, this.holdRenderTarget, readBuffer, delta );
+
+			this.accumulateIndex = 0;
+
+		}
+
+		var autoClear = renderer.autoClear;
+		renderer.autoClear = false;
+
+		var sampleWeight = 1.0 / ( jitterOffsets.length );
+
+		if ( this.accumulateIndex >= 0 && this.accumulateIndex < jitterOffsets.length ) {
+
+			this.copyUniforms[ "opacity" ].value = sampleWeight;
+			this.copyUniforms[ "tDiffuse" ].value = writeBuffer.texture;
+
+			// render the scene multiple times, each slightly jitter offset from the last and accumulate the results.
+			var numSamplesPerFrame = Math.pow( 2, this.sampleLevel );
+			for ( var i = 0; i < numSamplesPerFrame; i ++ ) {
+
+				var j = this.accumulateIndex;
+				var jitterOffset = jitterOffsets[ j ];
+
+				if ( this.camera.setViewOffset ) {
+
+					this.camera.setViewOffset( readBuffer.width, readBuffer.height,
+						jitterOffset[ 0 ] * 0.0625, jitterOffset[ 1 ] * 0.0625,   // 0.0625 = 1 / 16
+						readBuffer.width, readBuffer.height );
+
+				}
+
+				renderer.render( this.scene, this.camera, writeBuffer, true );
+				renderer.render( this.scene2, this.camera2, this.sampleRenderTarget, ( this.accumulateIndex === 0 ) );
+
+				this.accumulateIndex ++;
+
+				if ( this.accumulateIndex >= jitterOffsets.length ) break;
+
+			}
+
+			if ( this.camera.clearViewOffset ) this.camera.clearViewOffset();
+
+		}
+
+		var accumulationWeight = this.accumulateIndex * sampleWeight;
+
+		if ( accumulationWeight > 0 ) {
+
+			this.copyUniforms[ "opacity" ].value = 1.0;
+			this.copyUniforms[ "tDiffuse" ].value = this.sampleRenderTarget.texture;
+			renderer.render( this.scene2, this.camera2, writeBuffer, true );
+
+		}
+
+		if ( accumulationWeight < 1.0 ) {
+
+			this.copyUniforms[ "opacity" ].value = 1.0 - accumulationWeight;
+			this.copyUniforms[ "tDiffuse" ].value = this.holdRenderTarget.texture;
+			renderer.render( this.scene2, this.camera2, writeBuffer, ( accumulationWeight === 0 ) );
+
+		}
+
+		renderer.autoClear = autoClear;
+
+	}
+
+} );

+ 384 - 0
src/materials/postprocessing/UnrealBloomPass.js

@@ -0,0 +1,384 @@
+/**
+ * @author spidersharma / http://eduperiment.com/
+ *
+ * Inspired from Unreal Engine
+ * https://docs.unrealengine.com/latest/INT/Engine/Rendering/PostProcessEffects/Bloom/
+ */
+THREE.UnrealBloomPass = function ( resolution, strength, radius, threshold ) {
+
+	THREE.Pass.call( this );
+
+	this.strength = ( strength !== undefined ) ? strength : 1;
+	this.radius = radius;
+	this.threshold = threshold;
+	this.resolution = ( resolution !== undefined ) ? new THREE.Vector2( resolution.x, resolution.y ) : new THREE.Vector2( 256, 256 );
+
+	// create color only once here, reuse it later inside the render function
+	this.clearColor = new THREE.Color( 0, 0, 0 );
+
+	// render targets
+	var pars = { minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBAFormat };
+	this.renderTargetsHorizontal = [];
+	this.renderTargetsVertical = [];
+	this.nMips = 5;
+	var resx = Math.round( this.resolution.x / 2 );
+	var resy = Math.round( this.resolution.y / 2 );
+
+	this.renderTargetBright = new THREE.WebGLRenderTarget( resx, resy, pars );
+	this.renderTargetBright.texture.name = "UnrealBloomPass.bright";
+	this.renderTargetBright.texture.generateMipmaps = false;
+
+	for ( var i = 0; i < this.nMips; i ++ ) {
+
+		var renderTargetHorizonal = new THREE.WebGLRenderTarget( resx, resy, pars );
+
+		renderTargetHorizonal.texture.name = "UnrealBloomPass.h" + i;
+		renderTargetHorizonal.texture.generateMipmaps = false;
+
+		this.renderTargetsHorizontal.push( renderTargetHorizonal );
+
+		var renderTargetVertical = new THREE.WebGLRenderTarget( resx, resy, pars );
+
+		renderTargetVertical.texture.name = "UnrealBloomPass.v" + i;
+		renderTargetVertical.texture.generateMipmaps = false;
+
+		this.renderTargetsVertical.push( renderTargetVertical );
+
+		resx = Math.round( resx / 2 );
+
+		resy = Math.round( resy / 2 );
+
+	}
+
+	// luminosity high pass material
+
+	if ( THREE.LuminosityHighPassShader === undefined )
+		console.error( "THREE.UnrealBloomPass relies on THREE.LuminosityHighPassShader" );
+
+	var highPassShader = THREE.LuminosityHighPassShader;
+	this.highPassUniforms = THREE.UniformsUtils.clone( highPassShader.uniforms );
+
+	this.highPassUniforms[ "luminosityThreshold" ].value = threshold;
+	this.highPassUniforms[ "smoothWidth" ].value = 0.01;
+
+	this.materialHighPassFilter = new THREE.ShaderMaterial( {
+		uniforms: this.highPassUniforms,
+		vertexShader: highPassShader.vertexShader,
+		fragmentShader: highPassShader.fragmentShader,
+		defines: {}
+	} );
+
+	// Gaussian Blur Materials
+	this.separableBlurMaterials = [];
+	var kernelSizeArray = [ 3, 5, 7, 9, 11 ];
+	var resx = Math.round( this.resolution.x / 2 );
+	var resy = Math.round( this.resolution.y / 2 );
+
+	for ( var i = 0; i < this.nMips; i ++ ) {
+
+		this.separableBlurMaterials.push( this.getSeperableBlurMaterial( kernelSizeArray[ i ] ) );
+
+		this.separableBlurMaterials[ i ].uniforms[ "texSize" ].value = new THREE.Vector2( resx, resy );
+
+		resx = Math.round( resx / 2 );
+
+		resy = Math.round( resy / 2 );
+
+	}
+
+	// Composite material
+	this.compositeMaterial = this.getCompositeMaterial( this.nMips );
+	this.compositeMaterial.uniforms[ "blurTexture1" ].value = this.renderTargetsVertical[ 0 ].texture;
+	this.compositeMaterial.uniforms[ "blurTexture2" ].value = this.renderTargetsVertical[ 1 ].texture;
+	this.compositeMaterial.uniforms[ "blurTexture3" ].value = this.renderTargetsVertical[ 2 ].texture;
+	this.compositeMaterial.uniforms[ "blurTexture4" ].value = this.renderTargetsVertical[ 3 ].texture;
+	this.compositeMaterial.uniforms[ "blurTexture5" ].value = this.renderTargetsVertical[ 4 ].texture;
+	this.compositeMaterial.uniforms[ "bloomStrength" ].value = strength;
+	this.compositeMaterial.uniforms[ "bloomRadius" ].value = 0.1;
+	this.compositeMaterial.needsUpdate = true;
+
+	var bloomFactors = [ 1.0, 0.8, 0.6, 0.4, 0.2 ];
+	this.compositeMaterial.uniforms[ "bloomFactors" ].value = bloomFactors;
+	this.bloomTintColors = [ new THREE.Vector3( 1, 1, 1 ), new THREE.Vector3( 1, 1, 1 ), new THREE.Vector3( 1, 1, 1 ),
+							 new THREE.Vector3( 1, 1, 1 ), new THREE.Vector3( 1, 1, 1 ) ];
+	this.compositeMaterial.uniforms[ "bloomTintColors" ].value = this.bloomTintColors;
+
+	// copy material
+	if ( THREE.CopyShader === undefined ) {
+
+		console.error( "THREE.BloomPass relies on THREE.CopyShader" );
+
+	}
+
+	var copyShader = THREE.CopyShader;
+
+	this.copyUniforms = THREE.UniformsUtils.clone( copyShader.uniforms );
+	this.copyUniforms[ "opacity" ].value = 1.0;
+
+	this.materialCopy = new THREE.ShaderMaterial( {
+		uniforms: this.copyUniforms,
+		vertexShader: copyShader.vertexShader,
+		fragmentShader: copyShader.fragmentShader,
+		blending: THREE.AdditiveBlending,
+		depthTest: false,
+		depthWrite: false,
+		transparent: true
+	} );
+
+	this.enabled = true;
+	this.needsSwap = false;
+
+	this.oldClearColor = new THREE.Color();
+	this.oldClearAlpha = 1;
+
+	this.camera = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 0, 1 );
+	this.scene = new THREE.Scene();
+
+	this.basic = new THREE.MeshBasicMaterial();
+
+	this.quad = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2, 2 ), null );
+	this.quad.frustumCulled = false; // Avoid getting clipped
+	this.scene.add( this.quad );
+
+};
+
+THREE.UnrealBloomPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), {
+
+	constructor: THREE.UnrealBloomPass,
+
+	dispose: function () {
+
+		for ( var i = 0; i < this.renderTargetsHorizontal.length; i ++ ) {
+
+			this.renderTargetsHorizontal[ i ].dispose();
+
+		}
+
+		for ( var i = 0; i < this.renderTargetsVertical.length; i ++ ) {
+
+			this.renderTargetsVertical[ i ].dispose();
+
+		}
+
+		this.renderTargetBright.dispose();
+
+	},
+
+	setSize: function ( width, height ) {
+
+		var resx = Math.round( width / 2 );
+		var resy = Math.round( height / 2 );
+
+		this.renderTargetBright.setSize( resx, resy );
+
+		for ( var i = 0; i < this.nMips; i ++ ) {
+
+			this.renderTargetsHorizontal[ i ].setSize( resx, resy );
+			this.renderTargetsVertical[ i ].setSize( resx, resy );
+
+			this.separableBlurMaterials[ i ].uniforms[ "texSize" ].value = new THREE.Vector2( resx, resy );
+
+			resx = Math.round( resx / 2 );
+			resy = Math.round( resy / 2 );
+
+		}
+
+	},
+
+	render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) {
+
+		this.oldClearColor.copy( renderer.getClearColor() );
+		this.oldClearAlpha = renderer.getClearAlpha();
+		var oldAutoClear = renderer.autoClear;
+		renderer.autoClear = false;
+
+		renderer.setClearColor( this.clearColor, 0 );
+
+		if ( maskActive ) renderer.context.disable( renderer.context.STENCIL_TEST );
+
+		// Render input to screen
+
+		if ( this.renderToScreen ) {
+
+			this.quad.material = this.basic;
+			this.basic.map = readBuffer.texture;
+
+			renderer.render( this.scene, this.camera, undefined, true );
+
+		}
+
+		// 1. Extract Bright Areas
+
+		this.highPassUniforms[ "tDiffuse" ].value = readBuffer.texture;
+		this.highPassUniforms[ "luminosityThreshold" ].value = this.threshold;
+		this.quad.material = this.materialHighPassFilter;
+
+		renderer.render( this.scene, this.camera, this.renderTargetBright, true );
+
+		// 2. Blur All the mips progressively
+
+		var inputRenderTarget = this.renderTargetBright;
+
+		for ( var i = 0; i < this.nMips; i ++ ) {
+
+			this.quad.material = this.separableBlurMaterials[ i ];
+
+			this.separableBlurMaterials[ i ].uniforms[ "colorTexture" ].value = inputRenderTarget.texture;
+			this.separableBlurMaterials[ i ].uniforms[ "direction" ].value = THREE.UnrealBloomPass.BlurDirectionX;
+			renderer.render( this.scene, this.camera, this.renderTargetsHorizontal[ i ], true );
+
+			this.separableBlurMaterials[ i ].uniforms[ "colorTexture" ].value = this.renderTargetsHorizontal[ i ].texture;
+			this.separableBlurMaterials[ i ].uniforms[ "direction" ].value = THREE.UnrealBloomPass.BlurDirectionY;
+			renderer.render( this.scene, this.camera, this.renderTargetsVertical[ i ], true );
+
+			inputRenderTarget = this.renderTargetsVertical[ i ];
+
+		}
+
+		// Composite All the mips
+
+		this.quad.material = this.compositeMaterial;
+		this.compositeMaterial.uniforms[ "bloomStrength" ].value = this.strength;
+		this.compositeMaterial.uniforms[ "bloomRadius" ].value = this.radius;
+		this.compositeMaterial.uniforms[ "bloomTintColors" ].value = this.bloomTintColors;
+
+		renderer.render( this.scene, this.camera, this.renderTargetsHorizontal[ 0 ], true );
+
+		// Blend it additively over the input texture
+
+		this.quad.material = this.materialCopy;
+		this.copyUniforms[ "tDiffuse" ].value = this.renderTargetsHorizontal[ 0 ].texture;
+
+		if ( maskActive ) renderer.context.enable( renderer.context.STENCIL_TEST );
+
+
+		if ( this.renderToScreen ) {
+
+			renderer.render( this.scene, this.camera, undefined, false );
+
+		} else {
+
+			renderer.render( this.scene, this.camera, readBuffer, false );
+
+		}
+
+		// Restore renderer settings
+
+		renderer.setClearColor( this.oldClearColor, this.oldClearAlpha );
+		renderer.autoClear = oldAutoClear;
+
+	},
+
+	getSeperableBlurMaterial: function ( kernelRadius ) {
+
+		return new THREE.ShaderMaterial( {
+
+			defines: {
+				"KERNEL_RADIUS": kernelRadius,
+				"SIGMA": kernelRadius
+			},
+
+			uniforms: {
+				"colorTexture": { value: null },
+				"texSize": { value: new THREE.Vector2( 0.5, 0.5 ) },
+				"direction": { value: new THREE.Vector2( 0.5, 0.5 ) }
+			},
+
+			vertexShader:
+				"varying vec2 vUv;\n\
+				void main() {\n\
+					vUv = uv;\n\
+					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\
+				}",
+
+			fragmentShader:
+				"#include <common>\
+				varying vec2 vUv;\n\
+				uniform sampler2D colorTexture;\n\
+				uniform vec2 texSize;\
+				uniform vec2 direction;\
+				\
+				float gaussianPdf(in float x, in float sigma) {\
+					return 0.39894 * exp( -0.5 * x * x/( sigma * sigma))/sigma;\
+				}\
+				void main() {\n\
+					vec2 invSize = 1.0 / texSize;\
+					float fSigma = float(SIGMA);\
+					float weightSum = gaussianPdf(0.0, fSigma);\
+					vec3 diffuseSum = texture2D( colorTexture, vUv).rgb * weightSum;\
+					for( int i = 1; i < KERNEL_RADIUS; i ++ ) {\
+						float x = float(i);\
+						float w = gaussianPdf(x, fSigma);\
+						vec2 uvOffset = direction * invSize * x;\
+						vec3 sample1 = texture2D( colorTexture, vUv + uvOffset).rgb;\
+						vec3 sample2 = texture2D( colorTexture, vUv - uvOffset).rgb;\
+						diffuseSum += (sample1 + sample2) * w;\
+						weightSum += 2.0 * w;\
+					}\
+					gl_FragColor = vec4(diffuseSum/weightSum, 1.0);\n\
+				}"
+		} );
+
+	},
+
+	getCompositeMaterial: function ( nMips ) {
+
+		return new THREE.ShaderMaterial( {
+
+			defines: {
+				"NUM_MIPS": nMips
+			},
+
+			uniforms: {
+				"blurTexture1": { value: null },
+				"blurTexture2": { value: null },
+				"blurTexture3": { value: null },
+				"blurTexture4": { value: null },
+				"blurTexture5": { value: null },
+				"dirtTexture": { value: null },
+				"bloomStrength": { value: 1.0 },
+				"bloomFactors": { value: null },
+				"bloomTintColors": { value: null },
+				"bloomRadius": { value: 0.0 }
+			},
+
+			vertexShader:
+				"varying vec2 vUv;\n\
+				void main() {\n\
+					vUv = uv;\n\
+					gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\
+				}",
+
+			fragmentShader:
+				"varying vec2 vUv;\
+				uniform sampler2D blurTexture1;\
+				uniform sampler2D blurTexture2;\
+				uniform sampler2D blurTexture3;\
+				uniform sampler2D blurTexture4;\
+				uniform sampler2D blurTexture5;\
+				uniform sampler2D dirtTexture;\
+				uniform float bloomStrength;\
+				uniform float bloomRadius;\
+				uniform float bloomFactors[NUM_MIPS];\
+				uniform vec3 bloomTintColors[NUM_MIPS];\
+				\
+				float lerpBloomFactor(const in float factor) { \
+					float mirrorFactor = 1.2 - factor;\
+					return mix(factor, mirrorFactor, bloomRadius);\
+				}\
+				\
+				void main() {\
+					gl_FragColor = bloomStrength * ( lerpBloomFactor(bloomFactors[0]) * vec4(bloomTintColors[0], 1.0) * texture2D(blurTexture1, vUv) + \
+													 lerpBloomFactor(bloomFactors[1]) * vec4(bloomTintColors[1], 1.0) * texture2D(blurTexture2, vUv) + \
+													 lerpBloomFactor(bloomFactors[2]) * vec4(bloomTintColors[2], 1.0) * texture2D(blurTexture3, vUv) + \
+													 lerpBloomFactor(bloomFactors[3]) * vec4(bloomTintColors[3], 1.0) * texture2D(blurTexture4, vUv) + \
+													 lerpBloomFactor(bloomFactors[4]) * vec4(bloomTintColors[4], 1.0) * texture2D(blurTexture5, vUv) );\
+				}"
+		} );
+
+	}
+
+} );
+
+THREE.UnrealBloomPass.BlurDirectionX = new THREE.Vector2( 1.0, 0.0 );
+THREE.UnrealBloomPass.BlurDirectionY = new THREE.Vector2( 0.0, 1.0 );

+ 64 - 0
src/modules/CameraAnimation/CamAniEditor.js

@@ -0,0 +1,64 @@
+import * as THREE from "../../../libs/three.js/build/three.module.js"; 
+import {CameraAnimation} from './CameraAnimation.js'
+
+
+
+let CamAniEditor = {
+    
+    
+     
+    
+    createAnimation(data){
+        let animation = new CameraAnimation(viewer);
+        if(data) { 
+            animation.name = data.name;
+            animation.duration = data.duration;
+            animation.useDurSlice = data.useDurSlice
+             
+            for(const cpdata of data.points){ 
+                /* const position = Potree.Utils.datasetPosTransform({ fromDataset: true, position: cpdata.position, datasetId: Potree.settings.originDatasetId })
+                const target = Potree.Utils.datasetPosTransform({ fromDataset: true, position: cpdata.target, datasetId: Potree.settings.originDatasetId })
+                 */
+                const position = new THREE.Vector3().copy(cpdata.position)
+                const target = new THREE.Vector3().copy(cpdata.target)  
+                const duration = cpdata.time
+                const cp = animation.createControlPoint(null, {position, target, duration}); 
+            }
+        }
+        
+        animation.changeCallback()
+        viewer.scene.addCameraAnimation(animation);
+         
+        return animation
+        
+    },
+    
+    
+    removeAnimation(animation){
+        animation.dispatchEvent('dispose')
+        viewer.scene.removeCameraAnimation(animation)
+    }
+    
+    
+    
+     
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+}
+
+
+export default CamAniEditor

+ 314 - 0
src/modules/Images360/DepthImageSampler.js

@@ -0,0 +1,314 @@
+
+
+import math from "../../utils/math.js";
+
/**
 * Reads a panorama's equirectangular depth texture through an offscreen
 * canvas and reconstructs 3D positions (and an averaged surface normal)
 * behind skybox intersections.
 *
 * Depth pixel encoding (see getDepth): integer part in the green channel,
 * fractional part in the red channel — depth = g + r/256.
 */
class DepthImageSampler {

    constructor(){
        // Offscreen canvas used purely as a pixel read-back surface.
        var canvas = document.createElement("canvas");
        this.canvas = canvas
        this.context = canvas.getContext("2d")
    }

    /** Draw a new depth image into the canvas; skipped if already current. */
    changeImg(img){
        if(this.img == img)return
        this.canvas.width = img.width
        this.canvas.height = img.height
        this.context.drawImage(img, 0, 0)
        this.img = img
    }

    /**
     * Depth at normalized UV coordinates, or undefined when out of bounds.
     *
     * BUGFIX: the original guard compared against this.width / this.height,
     * which are never defined on this class (both undefined, so the check
     * could never reject anything); the canvas dimensions are the actual
     * image bounds.
     */
    getDepth(UVx, UVy) {
        var x = Math.round(UVx * (this.canvas.width - 1))
          , y = Math.round(UVy * (this.canvas.height - 1));
        if (!(x < 0 || y < 0 || x >= this.canvas.width || y >= this.canvas.height)) {
            var r = this.context.getImageData(x, y, 1, 1).data;
            // Integer metres in green, fraction in red.
            return r[1] + r[0] / 256
        }
    }

    /**
     * Resolve the real surface point behind a skybox intersection.
     *
     * @param intersect   skybox hit carrying {point} or a precomputed {dir}
     * @param currentPano pano whose depth map to sample; defaults to the
     *                    active pano (viewer.images360.currentPano)
     * @param onlyPos     when truthy, skip the normal estimation
     * @returns {location, normal, distance}; false when the ray hits sky /
     *          outside the model; undefined when the intersect is missing
     *          or the depth texture has not loaded yet
     */
    sample( intersect, currentPano, onlyPos ) {
        if(!intersect)return
        let location = new THREE.Vector3
        let normal
        currentPano = currentPano || viewer.images360.currentPano

        if(currentPano != this.currentPano){
            if(!currentPano.depthTex) return // depth texture not loaded yet
            this.changeImg(currentPano.depthTex.image)
            this.currentPano = currentPano
        }

        let origin = currentPano.position
        let dir = intersect.dir || new THREE.Vector3().subVectors(intersect.point, origin).normalize()

        // Direction in pano space (accounts for the pano's own rotation), then to UV.
        let dirInPano = dir.clone().applyMatrix4(currentPano.panoMatrix2Inverse).normalize();
        let uv = math.getUVfromDir(dirInPano)

        let distance = this.getDepth(uv.x, uv.y);
        if (!distance){
            if(uv.y > 0.75){
                // Blind spot directly under the pano: assume a floor plane
                // slightly below floorPosition and intersect the ray with it.
                const margin =  0.1
                distance = (currentPano.floorPosition.z - origin.z - margin) / dir.z
                location.copy(dir).multiplyScalar(distance).add(origin);
                let normal = new THREE.Vector3(0,0,1)

                return {location, normal, distance}
            }
            else return !1;  // probably sky or outside the model — a pano almost always sees its own floor
        }

        location.copy(dir).multiplyScalar(distance).add(origin);

        if(!onlyPos){
            // Sample the four neighbouring pixels and fit planes for the normal.
           var pL = this.getNearbyPoint(origin, uv, -1, 0)
          , pR = this.getNearbyPoint(origin, uv,  1, 0)
          , pB = this.getNearbyPoint(origin, uv,  0, -1)
          , pT = this.getNearbyPoint(origin, uv,  0, 1);

            normal = this.planeFit(dir,location, pL,pR,pB,pT  )
        }

        return {location, normal,  distance}
    }

    /** World position of the depth sample x/y pixels away from uv. */
    getNearbyPoint(   origin, uv, x, y) {
        let uv2 = uv.clone()
        uv2.x += x/(this.canvas.width-1);
        uv2.x = this.clampUV(uv2.x)

        uv2.y += y/(this.canvas.height-1);
        uv2.y = this.clampUV(uv2.y)

        let dir = math.getDirFromUV(uv2) // UV back to a world direction
        dir.applyMatrix4(viewer.images360.currentPano.panoMatrix2)
        let depth = this.getDepth(uv2.x, uv2.y);

        let position = new THREE.Vector3().copy(dir).multiplyScalar(depth).add(origin);

        return position
    }

    /** Wrap a UV coordinate into [0, 1). */
    clampUV(v){
        return (v + 1) % 1;
    }

    /**
     * Average normal at `position`: one plane per adjacent neighbour pair
     * (the four planes tile a small rhombus around the sample), each plane
     * normal flipped to face the viewer before accumulating.
     * Returns undefined when the accumulated normal is degenerate (zero).
     */
    planeFit(dir, position, pL,pR,pB,pT ) {
        let normal = new THREE.Vector3

        let plane = new THREE.Plane;
        function addNormal(p1, p2) {
            if(!p1 || !p2)return
            plane.setFromCoplanarPoints(position, p1, p2)
            // Sign chosen so the contribution always faces against the view ray.
            normal.addScaledVector(plane.normal,  dir.dot(plane.normal) < 0 ? 1 : -1)
        }
        addNormal(pL, pB)
        addNormal(pL, pT)
        addNormal(pR, pB)
        addNormal(pR, pT)

        if(0 !== normal.x || 0 !== normal.y || 0 !== normal.z){
            normal.normalize()
            return normal
        }
    }

}
+
/*
    Notes:

    The current test image is wrong — all three channels are identical, so the
    decoded depth is almost always an integer and the normals nearly all face
    the camera.

    Also, obtaining an intersect sometimes requires knowing which point cloud
    was hit (e.g. when adding measurement lines), so this sampler cannot fully
    replace raycasting.
 */
+
+export default DepthImageSampler
+
+
+    /* var i = n(4)
+      , r = function() {
+        function t(t) {
+            
+        }
+        return t.prototype.getDepth = function(t, e) {
+            var n = Math.round(t)
+              , i = Math.round(e);
+            if (!(n < 0 || i < 0 || n >= this.width || i >= this.height)) {
+                var r = this.context.getImageData(n, i, 1, 1).data;
+                return r[1] + r[0] / 256
+            }
+        }
+        ,
+        Object.defineProperty(t.prototype, "width", {
+            get: function() {
+                return this.context.canvas.width
+            },
+            enumerable: !0,
+            configurable: !0
+        }),
+        Object.defineProperty(t.prototype, "height", {
+            get: function() {
+                return this.context.canvas.height
+            },
+            enumerable: !0,
+            configurable: !0
+        }),
+        t
+    }();
+    e.CanvasDepthImage = r;
+    var o = function() {
+        function t() {}
+        return t.sample = function(e, n, r, o, a) {
+            var s = n.uv
+              , c = s.x * (e.width - 1)
+              , l = (1 - s.y) * (e.height - 1)
+              , u = e.getDepth(c, l);
+            if (!u)
+                return !1;
+            o.copy(n.point).sub(r).normalize().multiplyScalar(u).add(r);
+            var d = new i.Matrix3
+              , p = new i.Vector3
+              , h = new i.Vector3;
+            t.makeUvToPosMap(n, d, p, h);
+            var f = this.getNearbyPoint(e, n.point, r, s, d, p, h, -1, 0)
+              , g = this.getNearbyPoint(e, n.point, r, s, d, p, h, 1, 0)
+              , m = this.getNearbyPoint(e, n.point, r, s, d, p, h, 0, -1)
+              , v = this.getNearbyPoint(e, n.point, r, s, d, p, h, 0, 1);
+            return this.planeFit(o, r, f, g, m, v, a)
+        }
+        ,
+        t.makeUvToPosMap = function(t, e, n, r) {
+            var o = t.object.geometry
+              , a = o.attributes.position.array
+              , s = new i.Vector3(a[3 * t.face.a],a[3 * t.face.a + 1],a[3 * t.face.a + 2]).applyMatrix4(t.object.matrixWorld)
+              , c = new i.Vector3(a[3 * t.face.b],a[3 * t.face.b + 1],a[3 * t.face.b + 2]).applyMatrix4(t.object.matrixWorld)
+              , l = new i.Vector3(a[3 * t.face.c],a[3 * t.face.c + 1],a[3 * t.face.c + 2]).applyMatrix4(t.object.matrixWorld);
+            n.subVectors(s, c),
+            r.subVectors(l, c);
+            var u = o.attributes.uv.array
+              , d = new i.Vector2(u[2 * t.face.a],u[2 * t.face.a + 1])
+              , p = new i.Vector2(u[2 * t.face.b],u[2 * t.face.b + 1])
+              , h = new i.Vector2(u[2 * t.face.c],u[2 * t.face.c + 1])
+              , f = d.sub(p)
+              , g = h.sub(p);
+            e.set(f.x, g.x, 0, f.y, g.y, 0, 0, 0, 1),
+            e.getInverse(e)
+        }
+        ,
+        t.getNearbyPoint = function(t, e, n, r, o, a, s, c, l) {
+            var u = new i.Vector3(c / (t.width - 1),l / (t.height - 1))
+              , d = (r.x + u.x) * (t.width - 1)
+              , p = (1 - (r.y + u.y)) * (t.height - 1)
+              , h = t.getDepth(d, p);
+            if (void 0 !== h) {
+                var f = u.applyMatrix3(o);
+                return (new i.Vector3).addScaledVector(a, f.x).addScaledVector(s, f.y).add(e).sub(n).normalize().multiplyScalar(h).add(n)
+            }
+        }
+        ,
+        t.planeFit = function(t, e, n, r, o, a, s) {
+            s.set(0, 0, 0);
+            var c = t.clone().sub(e)
+              , l = new i.Plane;
+            function u(e, n) {
+                e && n && (l.setFromCoplanarPoints(t, e, n),
+                s.addScaledVector(l.normal, c.dot(l.normal) < 0 ? 1 : -1))
+            }
+            return u(n, o),
+            u(n, a),
+            u(r, o),
+            u(r, a),
+            (0 !== s.x || 0 !== s.y || 0 !== s.z) && (s.normalize(),
+            !0)
+        }
+        ,
+        t
+    }();
+    */

+ 36 - 0
src/objects/fireParticle/Tween.js

@@ -0,0 +1,36 @@
+import * as THREE from "../../../libs/three.js/build/three.module.js";
+import Common  from "../../utils/Common.js";
+
+
+
/**
 * Piecewise-linear keyframe track: interpolates numbers or THREE.Vector3
 * values over a sorted list of times, clamping outside the range.
 */
class Tween {

    /**
     * @param {number[]} times  sorted keyframe times
     * @param {(number|THREE.Vector3)[]} values  one value per keyframe
     */
    constructor(times, values) {
      this.times = times || []
      this.values = values || []
    }

    /** Sample the track at time t (clamped to the first/last keyframe). */
    lerp(t) {
      const count = this.times.length
      if (count == 0) return
      // First keyframe not strictly before t (-1 when t is past the end).
      const upper = this.times.findIndex((time) => !(t > time))
      if (upper == 0) return this.values[0]
      if (upper == -1) return this.values[count - 1]
      const a = this.values[upper - 1]
      const b = this.values[upper]
      const ratio = (t - this.times[upper - 1]) / (this.times[upper] - this.times[upper - 1])
      // Vector keyframes interpolate component-wise; scalars numerically.
      return (this.values[0] instanceof THREE.Vector3)
        ? a.clone().lerp(b, ratio)
        : a + ratio * (b - a)
    }

    /** Deep-copy this tween via the shared class-clone helper. */
    clone () {
        return Common.CloneClassObject(this)
    }

  }

  export default Tween

+ 342 - 0
src/utils/SplitScreen4Views.js

@@ -0,0 +1,342 @@
+ 
+import * as THREE from "../../libs/three.js/build/three.module.js";
+
+import SplitScreen from "./SplitScreen.js";
+
+
// Layout and camera setup for the four split-screen viewports: one
// perspective main view plus three orthographic views. For the ortho views,
// `axis` fixes which world axes map to the screen and `direction` is the
// camera's viewing direction.
const viewportProps = [
    {
        left:0.5,
        bottom:0.5,
        width: 0.5,height:0.5,
        name : 'MainView',   
        //view: viewer.scene.view,
        active: true,
    },
    {
        left:0,
        bottom:0.5,
        width: 0.5,height:0.5,
        name : 'top',   
        name2 : 'mapViewport', 
        axis:["x","y"],
        direction : new THREE.Vector3(0,0,-1), // camera view direction
        //axisSign:[1,1],
        active: true,
        // camera sits on the positive Z axis
    },
    {
        left:0.5,
        bottom:0,
        width: 0.5,height:0.5,
        name : 'right', 
        axis:["y","z"],
        direction : new THREE.Vector3(1,0,0),
        //axisSign:[1,1],
        active: true,
        // camera sits on the negative X axis — bottom-right viewport
    },
    {
        left:0,
        bottom:0,
        width: 0.5,height:0.5, 
        name : 'back', 
        axis:["x","z"],
        direction : new THREE.Vector3(0,-1,0),
        //axisSign:[-1,1],    // seen from the camera, x points left, hence the negative sign
        active: true,
        // camera sits on the positive Y axis — bottom-left viewport
    },
]
+
+
+
+
// Singleton quad-view controller built on the generic SplitScreen.
var SplitScreen4Views = new SplitScreen()


/**
 * Enter four-viewport mode: one perspective main view plus three
 * orthographic views (top / right / back). Captures the current viewer
 * state in this.statesBefore so recover() can restore it, then lowers
 * point-cloud density and switches materials for the ortho views.
 */
SplitScreen4Views.split = function(o={}){
    var defaultCamera = viewer.scene.getActiveCamera()

    let {boundSize, center} = viewer.bound

    viewer.setLimitFar(false) 
    viewer.mapViewer.attachToMainViewer(true,'split4Screens','dontSet') 

    let viewports = this.splitStart(viewportProps)

    // Background layer drawn over the map but under point clouds and other objects.
    let mapViewport = viewer.mapViewer.viewports[0]   
    mapViewport.noPointcloud = false
    // Hide the map cursor.
    //viewer.updateVisible(viewer.mapViewer.cursor, 'split4Screens', false)
    /* viewer.images360.panos.forEach(pano=>{
        viewer.updateVisible(pano.mapMarker, 'split4Screens', false) // hopefully mapMarker already exists at this point
    }) */


    // Viewer / material state to restore on recover().
    this.statesBefore = { 
        pointDensity : Potree.settings.pointDensity,
        displayMode : Potree.settings.displayMode,
        
        position: viewer.images360.position,
        target: viewer.scene.view.getPivot(),
         
        
        //---
        //ifShowMarker : Potree.settings.ifShowMarker, 
    }
    
    viewer.setPointStandardMat(true,null,true) // switch to standard mode (mainly for the mainViewport); point clouds use the standard point size

    var matBefore = { 
        opacity : new Map() 
    } 
    var newOpacityMap = new Map() 
     
    viewer.scene.pointclouds.forEach(e=>{
        matBefore.opacity.set(e, e.temp.pointOpacity) 
        matBefore.colorType = e.material.activeAttributeName
        
        /* { 
            var map = new Map()
            newOpacityMap.set(e, map )
            var size = e.bound.getSize()
            viewports.forEach(viewport=>{// derive opacity from the bound: smaller datasets need a larger opacity, but the effect proved too strong
                if(viewport.name == 'MainView')return;
                var prop = viewportProps.find(v => viewport.name == v.name2||viewport.name == v.name)
                let axis = prop.axis
                var width = size[axis[0]]
                var height = size[axis[1]]
                var area = width * height
                map.set(viewport, 5000/area);
            })
            
        }  */ 
    }) 
    
    // Per-viewport hook: full-quality rendering for the main view, reduced
    // density + normal-filtered single-color rendering for the ortho views.
    let beforeRender = function(){
        viewer.scene.pointclouds.forEach(e=>{ 
            if(this.name == "MainView"){ 
                e.material.activeAttributeName = matBefore.colorType // 'rgba'
                
                e.material.useFilterByNormal = false 
                e.changePointOpacity(matBefore.opacity.get(e)) //1 // restore; e.temp.pointOpacity is effectively 1
                
                Potree.settings.pointDensity = 'fourViewportsMain'/* 'fourViewports' */ // wanted higher quality than the other three views, but that flickers since point clouds take time to load (the NavVis-style build behaves the same; revisit later)
                
            }else{ 
                e.material.activeAttributeName = "color"
                e.material.useFilterByNormal = true 
                
                Potree.settings.pointDensity = 'fourViewports' // force lower point-cloud density
                
                e.changePointOpacity(0.6/* newOpacityMap.get(e).get(viewport), true */);  // with multiple datasets some are tiny and look very faint when zoomed in
                //console.log(e.name, viewport.name, e.temp.pointOpacity, e.material.opacity)
            }                 
        })  
    }    
    viewports.forEach(viewport=>{viewport.beforeRender = beforeRender})
     
     
     
    this.enableMap(false)
    this.enableFloorplan(false)
    viewer.mapViewer.setViewLimit('expand') // far-apart datasets need arbitrary distances, so no limit — but then the map edge may become visible; TODO confirm acceptable
    //viewer.dispatchEvent({'type': 'beginSplitView' }) 
    //viewer.updateScreenSize({forceUpdateSize:true})   
    
    
      
    //this.viewportFitBound(mapViewport, boundSize, center)
    //Potree.settings.ifShowMarker = false
    Potree.settings.displayMode = 'showPointCloud'
} 
+
+
+ 
+  
+
+
/**
 * Leave four-viewport mode and restore the viewer state captured by
 * split(): camera position/target, point density, display mode, map
 * attachment and point-cloud material flags.
 */
SplitScreen4Views.recover = function(){
    this.unSplit()
    
    /* const {width, height} = viewer.renderer.getSize(new THREE.Vector2());
    viewer.renderer.setViewport(0,0,width,height)
    viewer.renderer.setScissorTest( false ); */
    
    // Fly back to where the user was before splitting.
    viewer.setView({
        position: this.statesBefore.position,
        target: this.statesBefore.target,
        duration:300,
        callback:function(){ 
        }
    })
    
    
    
    viewer.mainViewport.beforeRender = null 
    viewer.setLimitFar(true)
    
    let mapViewport = viewer.mapViewer.viewports[0]
    viewer.mapViewer.attachToMainViewer(false) 
    //viewer.updateVisible(viewer.mapViewer.cursor, 'split4Screens', true)
    /* viewer.images360.panos.forEach(pano=>{
        viewer.updateVisible(pano.mapMarker, 'split4Screens', true)
    }) */
    mapViewport.noPointcloud = true
    { 
        // Restore map/floorplan visibility and drop the load listener.
        this.enableMap(Potree.settings.mapEnable)
        this.enableFloorplan(Potree.settings.floorplanEnable)
        if(this.floorplanListener){
            viewer.mapViewer.mapLayer.removeEventListener( 'floorplanLoaded', this.floorplanListener )  
            this.floorplanListener = null  
        } 
    }
     
    Potree.settings.pointDensity = this.statesBefore.pointDensity
    if(!Potree.settings.isOfficial){
        Potree.settings.displayMode = this.statesBefore.displayMode
    }
    
    viewer.scene.pointclouds.forEach(e=>{ 
        //e.material.color.set(this.statesBefore.mat.color)
        //e.material.activeAttributeName = this.statesBefore.mat.colorType 
        e.material.useFilterByNormal = false
        //e.material.opacity = this.statesBefore.mat.opacity  
    }) 
    viewer.setPointStandardMat(false)
    viewer.mapViewer.setViewLimit('standard')
    
    //Potree.settings.ifShowMarker = this.statesBefore.ifShowMarker
    //viewer.dispatchEvent({'type': 'finishSplitView' }) 
    //viewer.updateScreenSize({forceUpdateSize:true})  
    
} 
+
+ 
+
+
/**
 * Dim the shared map viewport with a translucent black overlay while a map
 * or floorplan is shown; clear the overlay otherwise.
 */
SplitScreen4Views.updateMapViewerBG = function(){
    const mapViewport = viewer.mapViewer.viewports[0]
    const overlayNeeded = this.floorplanEnabled || this.mapEnabled
    if(overlayNeeded){
        mapViewport.background = 'overlayColor'
        mapViewport.backgroundColor = new THREE.Color(0,0,0)
        mapViewport.backgroundOpacity = 0.5;
    }else{
        mapViewport.background = null
        mapViewport.backgroundColor = null
        mapViewport.backgroundOpacity = null
    }
}
+
/**
 * Toggle one floorplan's visibility through its own enable flag.
 * @param {Object} e     wrapper carrying a `floorplan` entity
 * @param {boolean} show desired visibility (defaults to hidden)
 */
SplitScreen4Views.setFloorplanDisplay = function(e, show=false){
    e.floorplan.setEnable(show)
}
+
+ 
/**
 * Show/hide the base map layer in the shared map viewport and refresh the
 * overlay background. Directly overrides the base-class setting.
 * FIX: the assignment originally ended with `,` (comma operator chaining it
 * into the next module-level assignment) — replaced with `;`.
 */
SplitScreen4Views.enableMap = function(enable){ 
    const map = viewer.mapViewer.mapLayer.maps.find(e=>e.name == 'map')
    map.setEnable(!!enable)

    //viewer.mapViewer.mapGradientBG = viewer.background == 'gradient' && !enable
    this.mapEnabled = enable
    this.updateMapViewerBG()
};
+
/**
 * Show/hide the custom floorplan overlays in the shared map viewport.
 * Keeps a 'floorplanLoaded' listener registered so floorplans that finish
 * loading later adopt the currently requested visibility.
 * FIX: the assignment originally ended with `,` (comma-operator chaining
 * into the next module-level assignment) — replaced with `;`. The duplicated
 * if/else that passed literal false/true is collapsed into one pass.
 */
SplitScreen4Views.enableFloorplan = function(enable){
    const floorplans = viewer.mapViewer.mapLayer.maps.filter(e=>e.name.includes('floorplan'))

    // Replace any previously registered load listener with one matching the
    // requested state (a floorplan may finish loading after this call).
    if(this.floorplanListener){
        viewer.mapViewer.mapLayer.removeEventListener( 'floorplanLoaded', this.floorplanListener )  
    }
    this.floorplanListener = (e)=>{
        this.setFloorplanDisplay(e, enable) 
    }
    viewer.mapViewer.mapLayer.addEventListener( 'floorplanLoaded', this.floorplanListener )

    // Apply the state to floorplans that are already loaded.
    floorplans.forEach(floorplan=>this.setFloorplanDisplay({floorplan}, enable))

    // Nothing loaded yet: trigger loading of all map entities.
    if (enable && floorplans.length == 0) Potree.loadMapEntity('all',true)

    this.floorplanEnabled = enable
    this.updateMapViewerBG()
};
+
+/* viewportFitBound:function(viewport, boundSize, center){  //使一个viewport聚焦在某个范围
+    var prop = viewportProps.find(v => viewport.name == v.name2||viewport.name == v.name)
+    let axis = prop.axis 
+    let expand = 10;
+    let position = center.clone()
+    var moveAtAxis = ['x','y','z'].find(e=>!(axis.includes(e))) 
+    
+    if(viewport.name == 'mapViewport'){ 
+        let ori = viewport.view.position[moveAtAxis] 
+        position[moveAtAxis] = ori //不改变这个值,尤其是mapViewer中的z
+    }else{
+        position[moveAtAxis] += boundSize[moveAtAxis]/2+expand//移动到bounding边缘外
+    }
+    
+    viewport.view.position.copy(position)
+    
+    var width = Math.max(boundSize[axis[0]],  boundSize[axis[1]] * viewport.camera.aspect)//视口宽度(米)
+    var margin = 50 //px
+    viewport.camera.zoom = (viewport.resolution.x - margin) / width  
+    viewport.camera.updateProjectionMatrix()
+},
+ */
+ 
/**
 * Aim all three orthographic viewports at one point cloud and fly the map
 * to its dataset.
 */
SplitScreen4Views.focusOnPointCloud = function(pointcloud){// all three ortho views focus on this point cloud
    var boundSize = pointcloud.bound.getSize(new THREE.Vector3);
    var center = pointcloud.bound.getCenter(new THREE.Vector3); 
    let target = pointcloud.panosBound && pointcloud.panosBound.center  // look where the panos cluster, i.e. where the cloud really has points (the full bound must stay visible, so it cannot serve as the center)
    this.focusOnObject(pointcloud.bound, center,target)
    
    viewer.flyToDataset({pointcloud, dontMoveMap:true, duration:0})
}
+
/**
 * Fit every non-main viewport to `bound`. The MainView branch is entirely
 * commented out, so the main camera is currently left untouched.
 * NOTE(review): this.viewportFitBound is commented out in this file —
 * presumably provided by the SplitScreen base object; verify it exists.
 */
SplitScreen4Views.focusOnObject = function(bound, center, target, duration=0){
    viewer.viewports.forEach(e=>{
        if(e.name == 'MainView'){
            /* let len = boundSize.length()
            let distance = THREE.Math.clamp(e.view.position.distanceTo(center),  len * 0.01,  len*0.3 ) // distance clamp
            //viewer.focusOnObject({position:center}, 'point', duration, {distance, direction: e.view.direction,dontMoveMap:true} )// pan the camera
            // to make locating easier, fly straight to the center instead:
            e.view.setView({
                position:center,  duration,  target   
            }) */  
        }else{
            this.viewportFitBound(e, bound, center)
        }
    })
} 
+    
+ 
+export default SplitScreen4Views