Pattern.glsl.js 8.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133
/*
All material copyright ESRI, All Rights Reserved, unless otherwise specified.
See https://js.arcgis.com/4.25/esri/copyright.txt for details.
*/
  5. import{addNearFar as e,addLinearDepth as o}from"../views/3d/webgl-engine/core/shaderLibrary/ForwardLinearDepth.glsl.js";import{ShaderOutput as t}from"../views/3d/webgl-engine/core/shaderLibrary/ShaderOutput.js";import{SliceDraw as r}from"../views/3d/webgl-engine/core/shaderLibrary/Slice.glsl.js";import{Transform as a}from"../views/3d/webgl-engine/core/shaderLibrary/Transform.glsl.js";import{VertexColor as i}from"../views/3d/webgl-engine/core/shaderLibrary/attributes/VertexColor.glsl.js";import{OutputDepth as n}from"../views/3d/webgl-engine/core/shaderLibrary/output/OutputDepth.glsl.js";import{OutputHighlight as l}from"../views/3d/webgl-engine/core/shaderLibrary/output/OutputHighlight.glsl.js";import{multipassTerrainTest as c}from"../views/3d/webgl-engine/core/shaderLibrary/shading/MultipassTerrainTest.glsl.js";import{symbolAlphaCutoff as d}from"../views/3d/webgl-engine/core/shaderLibrary/util/AlphaCutoff.js";import{ColorConversion as s}from"../views/3d/webgl-engine/core/shaderLibrary/util/ColorConversion.glsl.js";import{addProjViewLocalOrigin as p,addCameraPosition as g}from"../views/3d/webgl-engine/core/shaderLibrary/util/View.glsl.js";import{Float4PassUniform as u}from"../views/3d/webgl-engine/core/shaderModules/Float4PassUniform.js";import{FloatPassUniform as v}from"../views/3d/webgl-engine/core/shaderModules/FloatPassUniform.js";import{glsl as m}from"../views/3d/webgl-engine/core/shaderModules/interfaces.js";import{ShaderBuilder as f}from"../views/3d/webgl-engine/core/shaderModules/ShaderBuilder.js";import{TransparencyPassType as h}from"../views/3d/webgl-engine/lib/TransparencyPassType.js";import{VertexAttribute as w}from"../views/3d/webgl-engine/lib/VertexAttribute.js";import{Style as b}from"../views/3d/webgl-engine/materials/PatternStyle.js";const y=.70710678118,S=y,j=.08715574274;function x(x){const T=new 
f,C=x.hasMultipassTerrain&&(x.output===t.Color||x.output===t.Alpha);x.draped||T.extensions.add("GL_OES_standard_derivatives");const{vertex:R,fragment:D}=T;p(R,x),T.include(a,x),T.include(i,x),x.draped?R.uniforms.add(new v("worldToScreenRatio",((e,o)=>1/o.screenToPCSRatio))):T.attributes.add(w.BOUNDINGRECT,"mat3"),T.attributes.add(w.POSITION,"vec3"),T.attributes.add(w.UVMAPSPACE,"vec4"),T.varyings.add("vpos","vec3"),T.varyings.add("vuv","vec2"),C&&T.varyings.add("depth","float");const $=x.style===b.ForwardDiagonal||x.style===b.BackwardDiagonal||x.style===b.DiagonalCross;$&&R.code.add(m`
  6. const mat2 rotate45 = mat2(${m.float(y)}, ${m.float(-S)},
  7. ${m.float(S)}, ${m.float(y)});
  8. `),x.draped||(g(R,x),R.uniforms.add(new v("worldToScreenPerDistanceRatio",((e,o)=>1/o.camera.perScreenPixelRatio))),R.code.add(m`vec3 projectPointToLineSegment(vec3 center, vec3 halfVector, vec3 point) {
  9. float projectedLength = dot(halfVector, point - center) / dot(halfVector, halfVector);
  10. return center + halfVector * clamp(projectedLength, -1.0, 1.0);
  11. }`),R.code.add(m`vec3 intersectRayPlane(vec3 rayDir, vec3 rayOrigin, vec3 planeNormal, vec3 planePoint) {
  12. float d = dot(planeNormal, planePoint);
  13. float t = (d - dot(planeNormal, rayOrigin)) / dot(planeNormal, rayDir);
  14. return rayOrigin + t * rayDir;
  15. }`),R.code.add(m`
  16. float boundingRectDistanceToCamera() {
  17. vec3 center = vec3(boundingRect[0][0], boundingRect[0][1], boundingRect[0][2]);
  18. vec3 halfU = vec3(boundingRect[1][0], boundingRect[1][1], boundingRect[1][2]);
  19. vec3 halfV = vec3(boundingRect[2][0], boundingRect[2][1], boundingRect[2][2]);
  20. vec3 n = normalize(cross(halfU, halfV));
  21. vec3 viewDir = - vec3(view[0][2], view[1][2], view[2][2]);
  22. float viewAngle = dot(viewDir, n);
  23. float minViewAngle = ${m.float(j)};
  24. if (abs(viewAngle) < minViewAngle) {
  25. // view direction is (almost) parallel to plane -> clamp it to min angle
  26. float normalComponent = sign(viewAngle) * minViewAngle - viewAngle;
  27. viewDir = normalize(viewDir + normalComponent * n);
  28. }
  29. // intersect view direction with infinite plane that contains bounding rect
  30. vec3 planeProjected = intersectRayPlane(viewDir, cameraPosition, n, center);
  31. // clip to bounds by projecting to u and v line segments individually
  32. vec3 uProjected = projectPointToLineSegment(center, halfU, planeProjected);
  33. vec3 vProjected = projectPointToLineSegment(center, halfV, planeProjected);
  34. // use to calculate the closest point to camera on bounding rect
  35. vec3 closestPoint = uProjected + vProjected - center;
  36. return length(closestPoint - cameraPosition);
  37. }
  38. `)),R.code.add(m`
  39. vec2 scaledUV() {
  40. vec2 uv = uvMapSpace.xy ${$?" * rotate45":""};
  41. vec2 uvCellOrigin = uvMapSpace.zw ${$?" * rotate45":""};
  42. ${x.draped?"":m`
  43. float distanceToCamera = boundingRectDistanceToCamera();
  44. float worldToScreenRatio = worldToScreenPerDistanceRatio / distanceToCamera;
  45. `}
  46. // Logarithmically discretize ratio to avoid jittering
  47. float step = 0.1;
  48. float discreteWorldToScreenRatio = log(worldToScreenRatio);
  49. discreteWorldToScreenRatio = ceil(discreteWorldToScreenRatio / step) * step;
  50. discreteWorldToScreenRatio = exp(discreteWorldToScreenRatio);
  51. vec2 uvOffset = mod(uvCellOrigin * discreteWorldToScreenRatio, ${m.float(x.patternSpacing)});
  52. return uvOffset + (uv * discreteWorldToScreenRatio);
  53. }
  54. `);const V=x.output===t.Depth;return V&&(T.include(n,x),e(T),o(T)),R.code.add(m`
  55. void main(void) {
  56. vuv = scaledUV();
  57. vpos = position;
  58. ${C?"depth = (view * vec4(vpos, 1.0)).z;":""}
  59. forwardNormalizedVertexColor();
  60. gl_Position = ${V?m`transformPositionWithDepth(proj, view, vpos, nearFar, linearDepth);`:m`transformPosition(proj, view, vpos);`}
  61. }
  62. `),T.include(r,x),D.include(s),x.draped&&D.uniforms.add(new v("texelSize",((e,o)=>1/o.camera.pixelRatio))),x.output===t.Highlight&&T.include(l,x),C&&T.include(c,x),x.output!==t.Highlight&&(D.code.add(m`
  63. const float lineWidth = ${m.float(x.lineWidth)};
  64. const float spacing = ${m.float(x.patternSpacing)};
  65. const float spacingINV = ${m.float(1/x.patternSpacing)};
  66. float coverage(float p, float txlSize) {
  67. p = mod(p, spacing);
  68. float halfTxlSize = txlSize / 2.0;
  69. float start = p - halfTxlSize;
  70. float end = p + halfTxlSize;
  71. float coverage = (ceil(end * spacingINV) - floor(start * spacingINV)) * lineWidth;
  72. coverage -= min(lineWidth, mod(start, spacing));
  73. coverage -= max(lineWidth - mod(end, spacing), 0.0);
  74. return coverage / txlSize;
  75. }
  76. `),x.draped||D.code.add(m`const int maxSamples = 5;
  77. float sample(float p) {
  78. vec2 dxdy = abs(vec2(dFdx(p), dFdy(p)));
  79. float fwidth = dxdy.x + dxdy.y;
  80. ivec2 samples = 1 + ivec2(clamp(dxdy, 0.0, float(maxSamples - 1)));
  81. vec2 invSamples = 1.0 / vec2(samples);
  82. float accumulator = 0.0;
  83. for (int j = 0; j < maxSamples; j++) {
  84. if(j >= samples.y) {
  85. break;
  86. }
  87. for (int i = 0; i < maxSamples; i++) {
  88. if(i >= samples.x) {
  89. break;
  90. }
  91. vec2 step = vec2(i,j) * invSamples - 0.5;
  92. accumulator += coverage(p + step.x * dxdy.x + step.y * dxdy.y, fwidth);
  93. }
  94. }
  95. accumulator /= float(samples.x * samples.y);
  96. return accumulator;
  97. }`)),D.uniforms.add(new u("uColor",(e=>e.color))),D.code.add(m`
  98. void main() {
  99. discardBySlice(vpos);
  100. ${C?"terrainDepthTest(gl_FragCoord, depth);":""}
  101. vec4 color = ${x.hasVertexColors?"vColor * uColor;":"uColor;"}
  102. color = highlightSlice(color, vpos);
  103. ${x.output!==t.Highlight?m`color.a *= ${P(x)};`:""}
  104. if (color.a < ${m.float(d)}) {
  105. discard;
  106. }
  107. ${x.output===t.Alpha?m`gl_FragColor = vec4(color.a);`:""}
  108. ${x.output===t.Color?m`gl_FragColor = color; ${x.transparencyPassType===h.Color?"gl_FragColor = premultiplyAlpha(gl_FragColor);":""}`:""}
  109. ${x.output===t.Highlight?m`outputHighlight();`:""}
  110. ${x.output===t.Depth?m`outputDepth(linearDepth);`:""};
  111. }
  112. `),T}function P(e){function o(o){return e.draped?m`coverage(vuv.${o}, texelSize)`:m`sample(vuv.${o})`}switch(e.style){case b.ForwardDiagonal:case b.Horizontal:return o("y");case b.BackwardDiagonal:case b.Vertical:return o("x");case b.DiagonalCross:case b.Cross:return m`
  113. 1.0 - (1.0 - ${o("x")}) * (1.0 - ${o("y")})
  114. `;default:return"0.0"}}const T=Object.freeze(Object.defineProperty({__proto__:null,build:x},Symbol.toStringTag,{value:"Module"}));export{T as P,x as b};