pointcloud - 原始.vs

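// Vertex shader for adaptive point cloud rendering (Potree-style octree/kd-tree
// point clouds). Per point it derives a color from the active color_type_* define,
// computes an adaptive point size from the level of detail encoded in the
// visibleNodes texture, and filters points against clip boxes, clip polygons and
// attribute ranges by moving rejected vertices outside of clip space.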
precision highp float;
precision highp int;

#define max_clip_polygons 8
#define PI 3.141592653589793

attribute vec3 position;
attribute vec3 color;
attribute float intensity;
attribute float classification;
attribute float returnNumber;
attribute float numberOfReturns;
attribute float pointSourceID;
attribute vec4 indices;
attribute float spacing;
attribute float gpsTime;
attribute vec3 normal;
attribute float aExtra;

uniform mat4 modelMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 uViewInv;

uniform float uScreenWidth;
uniform float uScreenHeight;
uniform float fov;
uniform float near;
uniform float far;

uniform bool uDebug;

uniform bool uUseOrthographicCamera;
uniform float uOrthoWidth;
uniform float uOrthoHeight;

#define CLIPTASK_NONE 0
#define CLIPTASK_HIGHLIGHT 1
#define CLIPTASK_SHOW_INSIDE 2
#define CLIPTASK_SHOW_OUTSIDE 3

#define CLIPMETHOD_INSIDE_ANY 0
#define CLIPMETHOD_INSIDE_ALL 1

uniform int clipTask;
uniform int clipMethod;

#if defined(num_clipboxes) && num_clipboxes > 0
	uniform mat4 clipBoxes[num_clipboxes];
#endif

#if defined(num_clipspheres) && num_clipspheres > 0
	uniform mat4 uClipSpheres[num_clipspheres];
#endif

#if defined(num_clippolygons) && num_clippolygons > 0
	uniform int uClipPolygonVCount[num_clippolygons];
	uniform vec3 uClipPolygonVertices[num_clippolygons * 8];
	uniform mat4 uClipPolygonWVP[num_clippolygons];
#endif

uniform float size;
uniform float minSize;
uniform float maxSize;

uniform float uPCIndex;
uniform float uOctreeSpacing;
uniform float uNodeSpacing;
uniform float uOctreeSize;
uniform vec3 uBBSize;
uniform float uLevel;
uniform float uVNStart;
uniform bool uIsLeafNode;

uniform vec3 uColor;
uniform float uOpacity;

uniform vec2 elevationRange;
uniform vec2 intensityRange;

uniform vec2 uFilterReturnNumberRange;
uniform vec2 uFilterNumberOfReturnsRange;
uniform vec2 uFilterPointSourceIDClipRange;
uniform vec2 uFilterGPSTimeClipRange;

uniform float uGpsScale;
uniform float uGpsOffset;
uniform vec2 uNormalizedGpsBufferRange;

uniform vec3 uIntensity_gbc;
uniform vec3 uRGB_gbc;
uniform vec3 uExtra_gbc;

uniform float uTransition;
uniform float wRGB;
uniform float wIntensity;
uniform float wElevation;
uniform float wClassification;
uniform float wReturnNumber;
uniform float wSourceID;

uniform vec2 uExtraNormalizedRange;
uniform vec2 uExtraRange;
uniform float uExtraScale;
uniform float uExtraOffset;

uniform vec3 uShadowColor;

uniform sampler2D visibleNodes;
uniform sampler2D gradient;
uniform sampler2D classificationLUT;

#if defined(color_type_matcap)
	uniform sampler2D matcapTextureUniform;
#endif

uniform bool backfaceCulling;

#if defined(num_shadowmaps) && num_shadowmaps > 0
	uniform sampler2D uShadowMap[num_shadowmaps];
	uniform mat4 uShadowWorldView[num_shadowmaps];
	uniform mat4 uShadowProj[num_shadowmaps];
#endif

varying vec3 vColor;
varying float vLogDepth;
varying vec3 vViewPosition;
varying float vRadius;
varying float vPointSize;
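// GLSL ES 1.00 (WebGL 1) has no built-in round(), so the shader defines its own.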
float round(float number){
	return floor(number + 0.5);
}
//
// ADAPTIVE SIZES
//
// ---------------------
// OCTREE
// ---------------------
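// The functions below walk the octree hierarchy encoded in the visibleNodes
// texture, starting at this node's entry (uVNStart). Each visited texel appears
// to store a child visibility bitmask in the red channel, a 16-bit offset toward
// the node's children split across green (high byte) and blue (low byte), and
// LOD/spacing information in the alpha channel; numberOfOnes() of the mask then
// selects the particular visible child to advance to.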
#if (defined(adaptive_point_size) || defined(color_type_level_of_detail)) && defined(tree_type_octree)

/**
 * number of 1-bits up to inclusive index position
 * number is treated as if it were an integer in the range 0-255
 */
int numberOfOnes(int number, int index){
	int numOnes = 0;
	int tmp = 128;
	for(int i = 7; i >= 0; i--){
		if(number >= tmp){
			number = number - tmp;
			if(i <= index){
				numOnes++;
			}
		}
		tmp = tmp / 2;
	}
	return numOnes;
}

/**
 * checks whether the bit at index is 1
 * number is treated as if it were an integer in the range 0-255
 */
bool isBitSet(int number, int index){
	// weird multi else if due to lack of proper array, int and bitwise support in WebGL 1.0
	int powi = 1;
	if(index == 0){
		powi = 1;
	}else if(index == 1){
		powi = 2;
	}else if(index == 2){
		powi = 4;
	}else if(index == 3){
		powi = 8;
	}else if(index == 4){
		powi = 16;
	}else if(index == 5){
		powi = 32;
	}else if(index == 6){
		powi = 64;
	}else if(index == 7){
		powi = 128;
	}else{
		return false;
	}
	int ndp = number / powi;
	return mod(float(ndp), 2.0) != 0.0;
}

/**
 * find the LOD at the point position
 */
float getLOD(){
	vec3 offset = vec3(0.0, 0.0, 0.0);
	int iOffset = int(uVNStart);
	float depth = uLevel;
	for(float i = 0.0; i <= 30.0; i++){
		float nodeSizeAtLevel = uOctreeSize / pow(2.0, i + uLevel + 0.0);

		vec3 index3d = (position - offset) / nodeSizeAtLevel;
		index3d = floor(index3d + 0.5);
		int index = int(round(4.0 * index3d.x + 2.0 * index3d.y + index3d.z));

		vec4 value = texture2D(visibleNodes, vec2(float(iOffset) / 2048.0, 0.0));
		int mask = int(round(value.r * 255.0));

		if(isBitSet(mask, index)){
			// there are more visible child nodes at this position
			int advanceG = int(round(value.g * 255.0)) * 256;
			int advanceB = int(round(value.b * 255.0));
			int advanceChild = numberOfOnes(mask, index - 1);
			int advance = advanceG + advanceB + advanceChild;

			iOffset = iOffset + advance;
			depth++;
		}else{
			// no more visible child nodes at this position
			//return value.a * 255.0;
			float lodOffset = (255.0 * value.a) / 10.0 - 10.0;
			return depth + lodOffset;
		}

		offset = offset + (vec3(1.0, 1.0, 1.0) * nodeSizeAtLevel * 0.5) * index3d;
	}
	return depth;
}
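// getSpacing() walks the same hierarchy as getLOD(), but accumulates the point
// spacing of the node containing this point from the per-node spacing factor
// stored in the alpha channel.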
float getSpacing(){
	vec3 offset = vec3(0.0, 0.0, 0.0);
	int iOffset = int(uVNStart);
	float depth = uLevel;
	float spacing = uNodeSpacing;
	for(float i = 0.0; i <= 30.0; i++){
		float nodeSizeAtLevel = uOctreeSize / pow(2.0, i + uLevel + 0.0);

		vec3 index3d = (position - offset) / nodeSizeAtLevel;
		index3d = floor(index3d + 0.5);
		int index = int(round(4.0 * index3d.x + 2.0 * index3d.y + index3d.z));

		vec4 value = texture2D(visibleNodes, vec2(float(iOffset) / 2048.0, 0.0));
		int mask = int(round(value.r * 255.0));
		float spacingFactor = value.a;

		if(i > 0.0){
			spacing = spacing / (255.0 * spacingFactor);
		}

		if(isBitSet(mask, index)){
			// there are more visible child nodes at this position
			int advanceG = int(round(value.g * 255.0)) * 256;
			int advanceB = int(round(value.b * 255.0));
			int advanceChild = numberOfOnes(mask, index - 1);
			int advance = advanceG + advanceB + advanceChild;

			iOffset = iOffset + advance;
			//spacing = spacing / (255.0 * spacingFactor);
			//spacing = spacing / 3.0;
			depth++;
		}else{
			// no more visible child nodes at this position
			return spacing;
		}

		offset = offset + (vec3(1.0, 1.0, 1.0) * nodeSizeAtLevel * 0.5) * index3d;
	}
	return spacing;
}

float getPointSizeAttenuation(){
	return pow(2.0, getLOD());
}

#endif
// ---------------------
// KD-TREE
// ---------------------
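// For kd-trees the visibleNodes texture stores, per node, visible-children flags
// (red), the offset to the next node (green) and the split axis (blue). The
// traversal repeatedly halves the bounding box along the split axis until it
// reaches the deepest visible node containing this point.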
#if (defined(adaptive_point_size) || defined(color_type_level_of_detail)) && defined(tree_type_kdtree)

float getLOD(){
	vec3 offset = vec3(0.0, 0.0, 0.0);
	float iOffset = 0.0;
	float depth = 0.0;

	vec3 size = uBBSize;
	vec3 pos = position;

	for(float i = 0.0; i <= 1000.0; i++){
		vec4 value = texture2D(visibleNodes, vec2(iOffset / 2048.0, 0.0));

		int children = int(value.r * 255.0);
		float next = value.g * 255.0;
		int split = int(value.b * 255.0);

		if(next == 0.0){
			return depth;
		}

		vec3 splitv = vec3(0.0, 0.0, 0.0);
		if(split == 1){
			splitv.x = 1.0;
		}else if(split == 2){
			splitv.y = 1.0;
		}else if(split == 4){
			splitv.z = 1.0;
		}

		iOffset = iOffset + next;

		float factor = length(pos * splitv / size);
		if(factor < 0.5){
			// left
			if(children == 0 || children == 2){
				return depth;
			}
		}else{
			// right
			pos = pos - size * splitv * 0.5;
			if(children == 0 || children == 1){
				return depth;
			}
			if(children == 3){
				iOffset = iOffset + 1.0;
			}
		}
		size = size * ((1.0 - (splitv + 1.0) / 2.0) + 0.5);

		depth++;
	}

	return depth;
}

float getPointSizeAttenuation(){
	return 0.5 * pow(1.3, getLOD());
}

#endif
//
// ATTRIBUTES
//
// formula adapted from: http://www.dfstudios.co.uk/articles/programming/image-programming-algorithms/image-processing-algorithms-part-5-contrast-adjustment/
float getContrastFactor(float contrast){
	return (1.0158730158730156 * (contrast + 1.0)) / (1.0158730158730156 - contrast);
}
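// The *_gbc uniforms appear to pack (gamma, brightness, contrast) adjustments:
// the value is raised to the gamma, shifted by the brightness, scaled around 0.5
// by the contrast factor, and finally clamped to [0, 1].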
vec3 getRGB(){
	vec3 rgb = color;

	rgb = pow(rgb, vec3(uRGB_gbc.x));
	rgb = rgb + uRGB_gbc.y;
	rgb = (rgb - 0.5) * getContrastFactor(uRGB_gbc.z) + 0.5;
	rgb = clamp(rgb, 0.0, 1.0);

	return rgb;
}

float getIntensity(){
	float w = (intensity - intensityRange.x) / (intensityRange.y - intensityRange.x);
	w = pow(w, uIntensity_gbc.x);
	w = w + uIntensity_gbc.y;
	w = (w - 0.5) * getContrastFactor(uIntensity_gbc.z) + 0.5;
	w = clamp(w, 0.0, 1.0);

	return w;
}

vec3 getGpsTime(){
	float w = (gpsTime + uGpsOffset) * uGpsScale;
	vec3 c = texture2D(gradient, vec2(w, 1.0 - w)).rgb;

	// vec2 r = uNormalizedGpsBufferRange;
	// float w = gpsTime * (r.y - r.x) + r.x;
	// w = clamp(w, 0.0, 1.0);
	// vec3 c = texture2D(gradient, vec2(w, 1.0 - w)).rgb;

	return c;
}

vec3 getElevation(){
	vec4 world = modelMatrix * vec4(position, 1.0);
	float w = (world.z - elevationRange.x) / (elevationRange.y - elevationRange.x);
	vec3 cElevation = texture2D(gradient, vec2(w, 1.0 - w)).rgb;

	return cElevation;
}
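// Classification codes index into a 1D lookup texture (classificationLUT).
// The alpha channel of the LUT entry doubles as a visibility flag: doClipping()
// discards points whose class has alpha 0.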
vec4 getClassification(){
	vec2 uv = vec2(classification / 255.0, 0.5);
	vec4 classColor = texture2D(classificationLUT, uv);

	return classColor;
}

vec3 getReturns(){
	// 0b 00_000_111
	float rn = mod(returnNumber, 8.0);
	// 0b 00_111_000
	float nr = mod(returnNumber / 8.0, 8.0);

	if(nr <= 1.0){
		return vec3(1.0, 0.0, 0.0);
	}else{
		return vec3(0.0, 1.0, 0.0);
	}

	// return vec3(nr / 4.0, 0.0, 0.0);

	// if(nr == 1.0){
	// 	return vec3(1.0, 1.0, 0.0);
	// }else{
	// 	if(rn == 1.0){
	// 		return vec3(1.0, 0.0, 0.0);
	// 	}else if(rn == nr){
	// 		return vec3(0.0, 0.0, 1.0);
	// 	}else{
	// 		return vec3(0.0, 1.0, 0.0);
	// 	}
	// }

	// if(numberOfReturns == 1.0){
	// 	return vec3(1.0, 1.0, 0.0);
	// }else{
	// 	if(returnNumber == 1.0){
	// 		return vec3(1.0, 0.0, 0.0);
	// 	}else if(returnNumber == numberOfReturns){
	// 		return vec3(0.0, 0.0, 1.0);
	// 	}else{
	// 		return vec3(0.0, 1.0, 0.0);
	// 	}
	// }
}

vec3 getReturnNumber(){
	if(numberOfReturns == 1.0){
		return vec3(1.0, 1.0, 0.0);
	}else{
		if(returnNumber == 1.0){
			return vec3(1.0, 0.0, 0.0);
		}else if(returnNumber == numberOfReturns){
			return vec3(0.0, 0.0, 1.0);
		}else{
			return vec3(0.0, 1.0, 0.0);
		}
	}
}

vec3 getNumberOfReturns(){
	float value = numberOfReturns;
	float w = value / 6.0;
	vec3 color = texture2D(gradient, vec2(w, 1.0 - w)).rgb;

	return color;
}

vec3 getSourceID(){
	float w = mod(pointSourceID, 10.0) / 10.0;
	return texture2D(gradient, vec2(w, 1.0 - w)).rgb;
}
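// Composite coloring blends several attribute colorings, each weighted by its
// w* uniform, and normalizes by the total weight. A total weight of zero leaves
// nothing to show, so the point is moved outside clip space.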
vec3 getCompositeColor(){
	// explicitly zero-initialized; GLSL does not guarantee zeroed locals
	vec3 c = vec3(0.0);
	float w = 0.0;

	c += wRGB * getRGB();
	w += wRGB;

	c += wIntensity * getIntensity() * vec3(1.0, 1.0, 1.0);
	w += wIntensity;

	c += wElevation * getElevation();
	w += wElevation;

	c += wReturnNumber * getReturnNumber();
	w += wReturnNumber;

	c += wSourceID * getSourceID();
	w += wSourceID;

	vec4 cl = wClassification * getClassification();
	c += cl.a * cl.rgb;
	w += wClassification * cl.a;

	c = c / w;

	if(w == 0.0){
		//c = color;
		gl_Position = vec4(100.0, 100.0, 100.0, 0.0);
	}

	return c;
}
vec3 getNormal(){
	//vec3 n_hsv = vec3( modelMatrix * vec4( normal, 0.0 )) * 0.5 + 0.5; // (n_world.xyz + vec3(1.,1.,1.)) / 2.;
	vec3 n_view = normalize( vec3(modelViewMatrix * vec4( normal, 0.0 )) );
	return n_view;
}

bool applyBackfaceCulling() {
	// blacken vertices that do not face the camera (backface culling)
	vec3 e = normalize(vec3(modelViewMatrix * vec4( position, 1. )));
	vec3 n = getNormal(); // normalize( vec3(modelViewMatrix * vec4( normal, 0.0 )) );

	if((uUseOrthographicCamera && n.z <= 0.) || (!uUseOrthographicCamera && dot( n, e ) >= 0.)) {
		return true;
	} else {
		return false;
	}
}
#if defined(color_type_matcap)
// Matcap Material
vec3 getMatcap(){
	vec3 eye = normalize( vec3( modelViewMatrix * vec4( position, 1. ) ) );
	if(uUseOrthographicCamera) {
		eye = vec3(0., 0., -1.);
	}
	vec3 r_en = reflect( eye, getNormal() ); // or r_en = e - 2. * dot( n, e ) * n;
	float m = 2. * sqrt(pow( r_en.x, 2. ) + pow( r_en.y, 2. ) + pow( r_en.z + 1., 2. ));
	vec2 vN = r_en.xy / m + .5;
	return texture2D(matcapTextureUniform, vN).rgb;
}
#endif
vec3 getExtra(){
	float w = (aExtra + uExtraOffset) * uExtraScale;
	w = clamp(w, 0.0, 1.0);

	vec3 color = texture2D(gradient, vec2(w, 1.0 - w)).rgb;

	// vec2 r = uExtraNormalizedRange;
	// float w = aExtra * (r.y - r.x) + r.x;
	// w = (w - uExtraRange.x) / (uExtraRange.y - uExtraRange.x);
	// w = clamp(w, 0.0, 1.0);
	// vec3 color = texture2D(gradient, vec2(w, 1.0 - w)).rgb;

	return color;
}
vec3 getColor(){
	vec3 color;

	#ifdef color_type_rgba
		color = getRGB();
	#elif defined color_type_height || defined color_type_elevation
		color = getElevation();
	#elif defined color_type_rgb_height
		vec3 cHeight = getElevation();
		color = (1.0 - uTransition) * getRGB() + uTransition * cHeight;
	#elif defined color_type_depth
		float linearDepth = gl_Position.w;
		float expDepth = (gl_Position.z / gl_Position.w) * 0.5 + 0.5;
		color = vec3(linearDepth, expDepth, 0.0);
		//color = vec3(1.0, 0.5, 0.3);
	#elif defined color_type_intensity
		float w = getIntensity();
		color = vec3(w, w, w);
	#elif defined color_type_gps_time
		color = getGpsTime();
	#elif defined color_type_intensity_gradient
		float w = getIntensity();
		color = texture2D(gradient, vec2(w, 1.0 - w)).rgb;
	#elif defined color_type_color
		color = uColor;
	#elif defined color_type_level_of_detail
		float depth = getLOD();
		float w = depth / 10.0;
		color = texture2D(gradient, vec2(w, 1.0 - w)).rgb;
	#elif defined color_type_indices
		color = indices.rgb;
	#elif defined color_type_classification
		vec4 cl = getClassification();
		color = cl.rgb;
	#elif defined color_type_return_number
		color = getReturnNumber();
	#elif defined color_type_returns
		color = getReturns();
	#elif defined color_type_number_of_returns
		color = getNumberOfReturns();
	#elif defined color_type_source_id
		color = getSourceID();
	#elif defined color_type_point_source_id
		color = getSourceID();
	#elif defined color_type_normal
		color = (modelMatrix * vec4(normal, 0.0)).xyz;
	#elif defined color_type_phong
		color = color;
	#elif defined color_type_composite
		color = getCompositeColor();
	#elif defined color_type_matcap
		color = getMatcap();
	#else
		color = getExtra();
	#endif

	if (backfaceCulling && applyBackfaceCulling()) {
		color = vec3(0.);
	}

	return color;
}
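// Point size: projFactor converts a world-space radius into screen pixels for the
// current view distance. For adaptive sizing, the world-space radius shrinks as
// the LOD attenuation grows, so coarser octree levels get larger splats.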
float getPointSize(){
	float pointSize = 1.0;

	float slope = tan(fov / 2.0);
	float projFactor = -0.5 * uScreenHeight / (slope * vViewPosition.z);

	float scale = length(
		modelViewMatrix * vec4(0, 0, 0, 1) -
		modelViewMatrix * vec4(uOctreeSpacing, 0, 0, 1)
	) / uOctreeSpacing;
	projFactor = projFactor * scale;

	float r = uOctreeSpacing * 1.7;
	vRadius = r;

	#if defined fixed_point_size
		pointSize = size;
	#elif defined attenuated_point_size
		if(uUseOrthographicCamera){
			pointSize = size;
		}else{
			pointSize = size * spacing * projFactor;
			//pointSize = pointSize * projFactor;
		}
	#elif defined adaptive_point_size
		if(uUseOrthographicCamera) {
			float worldSpaceSize = 1.0 * size * r / getPointSizeAttenuation();
			pointSize = (worldSpaceSize / uOrthoWidth) * uScreenWidth;
		} else {
			float worldSpaceSize = 1.0 * size * r / getPointSizeAttenuation();
			pointSize = worldSpaceSize * projFactor;
		}
	#endif

	pointSize = max(minSize, pointSize);
	pointSize = min(maxSize, pointSize);

	vRadius = pointSize / projFactor;

	return pointSize;
}
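// Point-in-polygon test in the polygon's screen space: the point is projected
// with the polygon's world-view-projection matrix, then tested against the
// (up to 8) polygon vertices using the even-odd ray-crossing rule.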
#if defined(num_clippolygons) && num_clippolygons > 0
bool pointInClipPolygon(vec3 point, int polyIdx) {

	mat4 wvp = uClipPolygonWVP[polyIdx];
	//vec4 screenClipPos = uClipPolygonVP[polyIdx] * modelMatrix * vec4(point, 1.0);
	//screenClipPos.xy = screenClipPos.xy / screenClipPos.w * 0.5 + 0.5;

	vec4 pointNDC = wvp * vec4(point, 1.0);
	pointNDC.xy = pointNDC.xy / pointNDC.w;

	int j = uClipPolygonVCount[polyIdx] - 1;
	bool c = false;
	for(int i = 0; i < 8; i++) {
		if(i == uClipPolygonVCount[polyIdx]) {
			break;
		}
		//vec4 verti = wvp * vec4(uClipPolygonVertices[polyIdx * 8 + i], 1);
		//vec4 vertj = wvp * vec4(uClipPolygonVertices[polyIdx * 8 + j], 1);
		//verti.xy = verti.xy / verti.w;
		//vertj.xy = vertj.xy / vertj.w;
		//verti.xy = verti.xy / verti.w * 0.5 + 0.5;
		//vertj.xy = vertj.xy / vertj.w * 0.5 + 0.5;
		vec3 verti = uClipPolygonVertices[polyIdx * 8 + i];
		vec3 vertj = uClipPolygonVertices[polyIdx * 8 + j];
		if( ((verti.y > pointNDC.y) != (vertj.y > pointNDC.y)) &&
			(pointNDC.x < (vertj.x - verti.x) * (pointNDC.y - verti.y) / (vertj.y - verti.y) + verti.x) ) {
			c = !c;
		}
		j = i;
	}

	return c;
}
#endif
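// doClipping() hides filtered points by assigning them a position far outside
// the clip volume (w = 0 collapses the vertex entirely). It first applies the
// classification and per-attribute range filters, then evaluates clip boxes and
// clip polygons according to the configured clipTask/clipMethod.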
void doClipping(){

	{
		vec4 cl = getClassification();
		if(cl.a == 0.0){
			gl_Position = vec4(100.0, 100.0, 100.0, 0.0);
			return;
		}
	}

	#if defined(clip_return_number_enabled)
	{ // return number filter
		vec2 range = uFilterReturnNumberRange;
		if(returnNumber < range.x || returnNumber > range.y){
			gl_Position = vec4(100.0, 100.0, 100.0, 0.0);
			return;
		}
	}
	#endif

	#if defined(clip_number_of_returns_enabled)
	{ // number of returns filter
		vec2 range = uFilterNumberOfReturnsRange;
		if(numberOfReturns < range.x || numberOfReturns > range.y){
			gl_Position = vec4(100.0, 100.0, 100.0, 0.0);
			return;
		}
	}
	#endif

	#if defined(clip_gps_enabled)
	{ // GPS time filter
		float time = (gpsTime + uGpsOffset) * uGpsScale;
		vec2 range = uFilterGPSTimeClipRange;
		if(time < range.x || time > range.y){
			gl_Position = vec4(100.0, 100.0, 100.0, 0.0);
			return;
		}
	}
	#endif

	#if defined(clip_point_source_id_enabled)
	{ // point source ID filter
		vec2 range = uFilterPointSourceIDClipRange;
		if(pointSourceID < range.x || pointSourceID > range.y){
			gl_Position = vec4(100.0, 100.0, 100.0, 0.0);
			return;
		}
	}
	#endif

	int clipVolumesCount = 0;
	int insideCount = 0;

	#if defined(num_clipboxes) && num_clipboxes > 0
		for(int i = 0; i < num_clipboxes; i++){
			vec4 clipPosition = clipBoxes[i] * modelMatrix * vec4( position, 1.0 );
			bool inside = -0.5 <= clipPosition.x && clipPosition.x <= 0.5;
			inside = inside && -0.5 <= clipPosition.y && clipPosition.y <= 0.5;
			inside = inside && -0.5 <= clipPosition.z && clipPosition.z <= 0.5;

			insideCount = insideCount + (inside ? 1 : 0);
			clipVolumesCount++;
		}
	#endif

	#if defined(num_clippolygons) && num_clippolygons > 0
		for(int i = 0; i < num_clippolygons; i++) {
			bool inside = pointInClipPolygon(position, i);

			insideCount = insideCount + (inside ? 1 : 0);
			clipVolumesCount++;
		}
	#endif

	bool insideAny = insideCount > 0;
	bool insideAll = (clipVolumesCount > 0) && (clipVolumesCount == insideCount);

	if(clipMethod == CLIPMETHOD_INSIDE_ANY){
		if(insideAny && clipTask == CLIPTASK_HIGHLIGHT){
			vColor.r += 0.5;
		}else if(!insideAny && clipTask == CLIPTASK_SHOW_INSIDE){
			gl_Position = vec4(100.0, 100.0, 100.0, 1.0);
		}else if(insideAny && clipTask == CLIPTASK_SHOW_OUTSIDE){
			gl_Position = vec4(100.0, 100.0, 100.0, 1.0);
		}
	}else if(clipMethod == CLIPMETHOD_INSIDE_ALL){
		if(insideAll && clipTask == CLIPTASK_HIGHLIGHT){
			vColor.r += 0.5;
		}else if(!insideAll && clipTask == CLIPTASK_SHOW_INSIDE){
			gl_Position = vec4(100.0, 100.0, 100.0, 1.0);
		}else if(insideAll && clipTask == CLIPTASK_SHOW_OUTSIDE){
			gl_Position = vec4(100.0, 100.0, 100.0, 1.0);
		}
	}
}
//
// MAIN
//
void main() {
	vec4 mvPosition = modelViewMatrix * vec4(position, 1.0);
	vViewPosition = mvPosition.xyz;
	gl_Position = projectionMatrix * mvPosition;

	vLogDepth = log2(-mvPosition.z);

	//gl_Position = vec4(0.0, 0.0, 0.0, 1.0);
	//gl_PointSize = 5.0;

	// POINT SIZE
	float pointSize = getPointSize();
	//float pointSize = 2.0;
	gl_PointSize = pointSize;
	vPointSize = pointSize;

	// COLOR
	vColor = getColor();
	// vColor = vec3(1.0, 0.0, 0.0);

	//gl_Position = vec4(0.0, 0.0, 0.0, 1.0);
	//gl_Position = vec4(position.xzy / 1000.0, 1.0 );
	//gl_PointSize = 5.0;
	//vColor = vec3(1.0, 1.0, 1.0);

	// only for "replacing" approaches
	// if(getLOD() != uLevel){
	// 	gl_Position = vec4(10.0, 10.0, 10.0, 1.0);
	// }
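	// hq_depth_pass: push the vertex away from the camera by twice its radius,
	// presumably so that nearby overlapping points survive the depth test of a
	// later blending pass.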
	#if defined hq_depth_pass
		float originalDepth = gl_Position.w;
		float adjustedDepth = originalDepth + 2.0 * vRadius;
		float adjust = adjustedDepth / originalDepth;

		mvPosition.xyz = mvPosition.xyz * adjust;
		gl_Position = projectionMatrix * mvPosition;
	#endif

	// CLIPPING
	doClipping();

	#if defined(num_clipspheres) && num_clipspheres > 0
		for(int i = 0; i < num_clipspheres; i++){
			vec4 sphereLocal = uClipSpheres[i] * mvPosition;

			float distance = length(sphereLocal.xyz);

			if(distance < 1.0){
				float w = distance;
				vec3 cGradient = texture2D(gradient, vec2(w, 1.0 - w)).rgb;

				vColor = cGradient;
				//vColor = cGradient * 0.7 + vColor * 0.3;
			}
		}
	#endif
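	// Shadow maps: project the point into each shadow map and compare its
	// distance to the light against 9 neighbouring depth samples (a small PCF
	// kernel); points in shadow are tinted with uShadowColor.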
	#if defined(num_shadowmaps) && num_shadowmaps > 0

		const float sm_near = 0.1;
		const float sm_far = 10000.0;

		for(int i = 0; i < num_shadowmaps; i++){
			vec3 viewPos = (uShadowWorldView[i] * vec4(position, 1.0)).xyz;
			float distanceToLight = abs(viewPos.z);

			vec4 projPos = uShadowProj[i] * uShadowWorldView[i] * vec4(position, 1);
			vec3 nc = projPos.xyz / projPos.w;

			float u = nc.x * 0.5 + 0.5;
			float v = nc.y * 0.5 + 0.5;

			vec2 sampleStep = vec2(1.0 / (2.0 * 1024.0), 1.0 / (2.0 * 1024.0)) * 1.5;
			vec2 sampleLocations[9];
			sampleLocations[0] = vec2(0.0, 0.0);
			sampleLocations[1] = sampleStep;
			sampleLocations[2] = -sampleStep;
			sampleLocations[3] = vec2(sampleStep.x, -sampleStep.y);
			sampleLocations[4] = vec2(-sampleStep.x, sampleStep.y);
			sampleLocations[5] = vec2(0.0, sampleStep.y);
			sampleLocations[6] = vec2(0.0, -sampleStep.y);
			sampleLocations[7] = vec2(sampleStep.x, 0.0);
			sampleLocations[8] = vec2(-sampleStep.x, 0.0);

			float visibleSamples = 0.0;
			float numSamples = 0.0;

			float bias = vRadius * 2.0;

			for(int j = 0; j < 9; j++){
				vec4 depthMapValue = texture2D(uShadowMap[i], vec2(u, v) + sampleLocations[j]);

				float linearDepthFromSM = depthMapValue.x + bias;
				float linearDepthFromViewer = distanceToLight;

				if(linearDepthFromSM > linearDepthFromViewer){
					visibleSamples += 1.0;
				}

				numSamples += 1.0;
			}

			float visibility = visibleSamples / numSamples;

			if(u < 0.0 || u > 1.0 || v < 0.0 || v > 1.0 || nc.x < -1.0 || nc.x > 1.0 || nc.y < -1.0 || nc.y > 1.0 || nc.z < -1.0 || nc.z > 1.0){
				//vColor = vec3(0.0, 0.0, 0.2);
			}else{
				//vColor = vec3(1.0, 1.0, 1.0) * visibility + vec3(1.0, 1.0, 1.0) * vec3(0.5, 0.0, 0.0) * (1.0 - visibility);
				vColor = vColor * visibility + vColor * uShadowColor * (1.0 - visibility);
			}
		}

	#endif
}