<html>
<!--
Adding Brandon Jones and Colin MacKenzie IV's gl-matrix library that provides easy to
use vector and matrix maths operations. Source: https://github.com/toji/gl-matrix
-->
<script type="text/javascript" src="gl-matrix.js"></script>
<!-- Main WebGL functions -->
<script type="text/javascript">
//
// Space for some global variables
//
// This variable stores whether the image currently on screen is out of date and needs
// to be recreated. The volume rendering is only triggered if this value is set to
// 'true' in order to save computational resources that would otherwise be wasted
// recreating the same image over and over again. It is set to true by any user input
// that modifies the rendering
let RenderingIsDirty = true;
let TransferFunctionIsDirty = true;
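// A sketch of how these flags are expected to be driven (the actual handlers are
// wired up in the HTML part of this file): any widget that modifies the rendering
// marks it dirty, for example
//   document.getElementById("camera-r").oninput = () => { RenderingIsDirty = true; };
// and the render loop below resets the flags once it has consumed them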
//
// Shader codes
//
// Note: For all shaders, "#version 300 es" has to be the first token in the source, so
//
// var vertexSource = `
// #version 300 es
//
// would not work as there is a \n after `
//
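// The working pattern, used for all shader sources below, therefore puts the
// directive immediately after the opening backtick:
//
// const vertexSource = `#version 300 es
// ...`;
//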
// The vertex shader contains hardcoded positions for a generic bounding box. The box is
// created as a unit cube from -1 to 1. The box will then be scaled to the correct size
// and proportions using the model matrix in case we are looking at a non-cube dataset.
// We are building a cube as a bounding box:
//
// 7---------------------6
// /| /|
// / | / |
// / | / |
// / | / |
// 3----+----------------2 |
// | | | |
// | | | |
// | 4----------------+----5
// | / | /
// | / | /
// | / | /
// |/ |/
// 0---------------------1
//
// y
// ^
// | z
// | /
// |/
// o-----> x
//
// So we are generating the triangles:
// (0, 2, 1), (0, 3, 2) for the front side
// (1, 6, 5), (1, 2, 6) for the right side
// (4, 3, 0), (4, 7, 3) for the left side
// (4, 6, 7), (4, 5, 6) for the back side
// (3, 6, 2), (3, 7, 6) for the top side
// (1, 4, 0), (1, 5, 4) for the bottom side
const boundingBoxVertexSource = `#version 300 es
// WebGL2 requires specifying the floating point precision once per program object
precision highp float;
#line 77 // This sets the line numbers to match with the line numbers in this file
// Hard-code all of the vertices for a 6-face cube centered on 0 with a side-length of 1
const vec3 p0 = vec3(-1.0, -1.0, -1.0);
const vec3 p1 = vec3( 1.0, -1.0, -1.0);
const vec3 p2 = vec3( 1.0, 1.0, -1.0);
const vec3 p3 = vec3(-1.0, 1.0, -1.0);
const vec3 p4 = vec3(-1.0, -1.0, 1.0);
const vec3 p5 = vec3( 1.0, -1.0, 1.0);
const vec3 p6 = vec3( 1.0, 1.0, 1.0);
const vec3 p7 = vec3(-1.0, 1.0, 1.0);
// 6 faces * 2 triangles/face * 3 vertices/triangle = 36 vertices
const vec3 positions[36] = vec3[](
p0, p2, p1, p0, p3, p2, // front side
p1, p6, p5, p1, p2, p6, // right side
p4, p3, p0, p4, p7, p3, // left side
p4, p6, p7, p4, p5, p6, // back side
p3, p6, p2, p3, p7, p6, // top side
p1, p4, p0, p1, p5, p4 // bottom side
);
// Specifies the varying variable that stores the position of the vertex. The value of
// this variable will be interpolated in the fragment shader
out vec3 position;
// The model matrix specifies the transformation for the current bounding box
uniform mat4 modelMatrix;
// The view matrix specifies information about the location of the virtual camera
uniform mat4 viewMatrix;
// The projection matrix determines the projection and its parameters, like FOV
uniform mat4 projectionMatrix;
void main() {
// gl_VertexID is a library-defined variable that corresponds to the number of the
// vertex for which the vertex shader is currently being evaluated
vec4 p = vec4(positions[gl_VertexID], 1.0);
// gl_Position is a library-defined variable that needs to be set by the vertex shader
gl_Position = projectionMatrix * viewMatrix * modelMatrix * p;
// Just passing the value along for the fragment shader to interpolate the value
// between the vertices
position = p.xyz;
}
`;
const boundingBoxFragmentSource = `#version 300 es
// WebGL2 requires specifying the floating point precision once per program object
precision highp float;
#line 126 // This sets the line numbers to match with the line numbers in this file
// Incoming varying variable from the vertex shader
in vec3 position;
// Define the output variable as a vec4 (= color)
out vec4 out_color;
void main() {
// Using the normalized position as the color. Since the positions are in [-1, 1] and
// the colors are in [0, 1], we need to renormalize here
vec3 normPos = (position + vec3(1.0)) / vec3(2.0);
out_color = vec4(normPos, 1.0);
}
`;
// The vertex shader hardcodes the screen-aligned quad that is used to trigger the
// fragment shader.
//
// We are building a screen-aligned quad:
//
// 3--------------------------2
// | |
// | |
// | |
// | |
// | |
// 0--------------------------1
//
// y
// ^
// |
// |
// |
// o-----> x
// So we are generating the triangles: (0, 1, 2), (0, 2, 3)
const volumeRenderingVertexSource = `#version 300 es
// WebGL2 requires specifying the floating point precision once per program object
precision highp float;
#line 167 // This sets the line numbers to match with the line numbers in this file
const vec2 p0 = vec2(-1.0, -1.0);
const vec2 p1 = vec2( 1.0, -1.0);
const vec2 p2 = vec2( 1.0, 1.0);
const vec2 p3 = vec2(-1.0, 1.0);
// 1 quad * 2 triangles / quad * 3 vertices / triangle = 6 vertices
const vec2 positions[6] = vec2[](p0, p1, p2, p0, p2, p3);
// This varying variable represents the texture coordinates that are used for the rays
out vec2 st;
void main() {
// gl_VertexID is a library-defined variable that corresponds to the number of the
// vertex for which the vertex shader is currently being evaluated
vec2 p = positions[gl_VertexID];
// We can use the position here directly
gl_Position = vec4(p, 0.0, 1.0);
// The positions are in range [-1, 1], but the texture coordinates should be [0, 1]
st = (p + vec2(1.0)) / vec2(2.0);
}
`;
const volumeRenderingFragmentSource = `#version 300 es
// WebGL2 requires specifying the floating point precision once per program object
precision highp float;
precision highp sampler2D;
precision highp sampler3D;
#line 197 // This sets the line numbers to match with the line numbers in this file
uniform sampler2D entryPoints; // The texture that holds the entry points
uniform sampler2D exitPoints; // The texture that holds the exit points
uniform sampler3D volume; // The texture that holds the volume data
uniform sampler2D transferFunction; // The texture that holds the transfer function data
// WebGL doesn't like 1D textures, so this is a 2D
// texture that is only 1 pixel high
uniform float stepSize; // The ray step size as determined by the user
uniform int renderType; // The value of the 'Rendering output' parameter
uniform int compositingMethod; // The value of the 'Compositing method' parameter
// Poor man's enum for the compositing methods. If additional compositing methods are
// added, they have to be accounted for in the rendering function as well
const int CompositingMethodFrontToBack = 0;
const int CompositingMethodFirstHitPoint = 1;
const int CompositingMethodMaximumIntensityProjection = 2;
in vec2 st; // This is the texture coordinate of the fragment that we are currently
// computing. This is used to look up the entry/exit points to compute the
// direction of the ray
out vec4 out_color; // The output variable where we will store the final color that we
// painstakingly raycasted
/// This function computes the final color for the ray traversal by actually performing
/// the volume rendering.
/// @param entryCoord The coordinate where the ray enters the bounding box
/// @param exitCoord The coordinates where the ray exits the bounding box
/// @return The final color that this ray represents
vec4 traverseRay(vec3 entryCoord, vec3 exitCoord) {
// Make some space to collect the resulting color for this ray
vec4 result = vec4(0.0);
// Compute the ray direction based on the entry and exit coordinates
vec3 rayDirection = exitCoord - entryCoord;
// This is our ray-advance parameter which will go from t=0 for the entry point to
// t=tEnd for the exit point of the ray
float t = 0.0;
// Actually compute tEnd
float tEnd = length(rayDirection);
// Normalize the ray direction
rayDirection = normalize(rayDirection);
// The user gave us a step size along the ray, so we use it here. At every step along
// the ray, we increment t by tIncr and evaluate the sample at that position
float tIncr = stepSize;
// Start with the volume rendering. We continue with this loop until we are either
// reaching the end of the ray (t >= tEnd) or if our resulting color is so saturated
// (result.a >= 0.99) that any other samples that would follow would have so little
// impact as to make the computation unnecessary (called early ray termination)
while (t < tEnd && result.a < 0.99) {
// Compute the current sampling position along the ray
vec3 sampleCoord = entryCoord + t * rayDirection;
// Sample the volume at the sampling position. The volume we are looking at is a
// scalar volume, so it only has a single value, which we extract here from the r
// component
float value = texture(volume, sampleCoord).r;
// Feed the value through the transfer function. The 0.5 here is a crux as WebGL2
// does not support 1D textures, so we need to choose a coordinate for the second
// dimension. The transfer function texture only has a single pixel in the second
// dimension and we want to avoid any potential issues with interpolation, so we
// access the pixel right in the center
vec4 color = texture(transferFunction, vec2(value, 0.5));
// We only want to continue if the sample we are currently using actually has any
// contribution. If the alpha value is 0, it will not contribute anything, so skip
if (color.a > 0.0) {
if (compositingMethod == CompositingMethodFrontToBack) {
// This line is a bit magic. Basically, we want to prevent the brightness of
// the resulting image from depending on the stepSize the user chooses. A
// smaller step size means more samples per ray, so the opacity contribution
// of each individual sample has to be reduced accordingly to keep the overall
// pixel value roughly the same. 150 is an arbitrarily chosen reference
// sampling rate
const float ReferenceSamplingInterval = 150.0;
color.a = 1.0 - pow(1.0 - color.a, tIncr * ReferenceSamplingInterval);
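// Why this works (for a locally constant alpha): with
// alpha' = 1 - (1 - alpha)^(tIncr * 150), compositing two corrected steps of
// size tIncr front-to-back accumulates 1 - (1 - alpha)^(2 * tIncr * 150),
// which equals the contribution of a single corrected step of size 2 * tIncr,
// so the accumulated opacity is independent of the chosen step size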
// Front-to-back compositing: blend the new sample behind everything that has
// already been accumulated, weighting it by the remaining transparency
// (1.0 - result.a)
result.rgb = (1.0 - result.a) * color.rgb * color.a + result.rgb;
result.a = (1.0 - result.a) * color.a + result.a;
}
else if (compositingMethod == CompositingMethodFirstHitPoint) {
// First-hit-point compositing: take the color of the first sample whose
// opacity exceeds a small threshold and terminate the ray there
if (color.a > 0.2) {
result = color;
t = tEnd;
}
}
else if (compositingMethod == CompositingMethodMaximumIntensityProjection) {
// Maximum-intensity projection: keep the sample with the highest opacity seen
// along the ray (intensity is judged here by the transfer function's alpha)
if (color.a > result.a) {
result = color;
}
}
}
// Step further along the ray
t += tIncr;
}
// If we get here, the while loop above terminated, so we are done with the ray, so
// we can return the result
return result;
}
void main() {
// Access the entry point texture at our current pixel location to get the entry pos
vec3 entryCoord = texture(entryPoints, st).rgb;
// Access the exit point texture at our current pixel location to get the exit pos
vec3 exitCoord = texture(exitPoints, st).rgb;
// Poor man's enum for the render types. These values should be synchronized with the
// render function in case any of the numbers change
const int RenderTypeVolumeRendering = 0;
const int RenderTypeEntryPoints = 1;
const int RenderTypeExitPoints = 2;
const int RenderTypeRayDirection = 3;
const int RenderTypeTransferFunction = 4;
const int RenderTypeVolumeSlice = 5;
const int RenderTypeVolumeSliceWithTransferFunction = 6;
// The values that are checked against here have to stay in sync with the mapping in
// the renderVolume function
if (renderType == RenderTypeVolumeRendering) {
// Check for an early out. If the entry coordinate is the same as the exit
// coordinate then our current pixel is missing the volume, so there is no need for
// any ray traversal
if (entryCoord == exitCoord) {
discard;
}
// Perform the raycasting using the entry and the exit pos
vec4 pixelColor = traverseRay(entryCoord, exitCoord);
// As the raycasting might not return a fully opaque color (for example if the ray
// exits the volume without being fully saturated), we can't just assign the color,
// but need to mix (= lerp) it with a fully black background color
out_color = mix(vec4(0.0, 0.0, 0.0, 1.0), pixelColor, pixelColor.a);
}
else if (renderType == RenderTypeEntryPoints) {
// Just rendering the entry point coordinate back as a color
out_color = vec4(entryCoord, 1.0);
}
else if (renderType == RenderTypeExitPoints) {
// Just rendering the exit point coordinate back as a color
out_color = vec4(exitCoord, 1.0);
}
else if (renderType == RenderTypeRayDirection) {
// Render the ray direction as a color. We need to take the absolute value here as
// it is difficult to render negative colors
out_color = vec4(abs(exitCoord - entryCoord), 1.0);
}
else if (renderType == RenderTypeTransferFunction) {
// Just render the transfer function into the viewport
vec4 c = texture(transferFunction, st);
out_color = vec4(c.rgb, 1.0);
}
else if (renderType == RenderTypeVolumeSlice) {
// Take a central slice of the volume and render it to the screen. This is mainly
// meant as a control for the next option.
float value = texture(volume, vec3(st, 0.5)).r;
out_color = vec4(value, value, value, 1.0);
}
else if (renderType == RenderTypeVolumeSliceWithTransferFunction) {
// Take a central slice out of the volume and render it to the screen. Then, apply
// the transfer function to all pixels. This rendering option can be used to verify
// that the transfer function editor works as expected before trying it on the
// volume rendering
float value = texture(volume, vec3(st, 0.5)).r;
vec4 c = texture(transferFunction, vec2(value, 0.5));
out_color = vec4(c.rgb, 1.0);
}
}
`;
/// This function creates an OpenGL program object from the provided vertex and fragment
/// sources. If the creation of the OpenGL shader objects or the OpenGL program object
/// fails, a null object is returned.
/// @param gl the OpenGL context in which the program is created
/// @param vertexSource the source that is used for compiling the vertex shader
/// @param fragmentSource the source that is used for compiling the fragment shader
/// @return the compiled and linked program object or null if there was an error with the
/// compilation or the linking
function createProgram(gl, vertexSource, fragmentSource) {
// Helper function to load a shader of the specified type (Vertex or Fragment)
function loadShader(gl, type, source) {
// Create the ShaderObject
let shader = gl.createShader(type);
// Set the source code of the shader
gl.shaderSource(shader, source);
// Compile the shader code
gl.compileShader(shader);
// Check for compile errors
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
postError("Failed to compile shader: " + gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
else {
return shader;
}
}
// Create the vertex shader object
let vertexShader = loadShader(gl, gl.VERTEX_SHADER, vertexSource);
// Create the fragment shader object
let fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fragmentSource);
// If either of the shaders failed to compile, we bail out
if (!vertexShader || !fragmentShader) {
return;
}
// Create the ProgramObject
let program = gl.createProgram();
// Attach the vertex and fragment shaders to the program object
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
// Link the Program Object
gl.linkProgram(program);
// Check for linking errors
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
postError('Failed to create program object: ' + gl.getProgramInfoLog(program));
gl.deleteProgram(program);
return;
}
return program;
}
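// Typical usage, as done in main() below:
//   const program = createProgram(gl, vertexSource, fragmentSource);
//   if (!program) { return; } // the error has already been reported via postError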
function hexToRgb(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16)
} : null;
}
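// For example, hexToRgb("#ff8800") returns { r: 255, g: 136, b: 0 }; the leading '#'
// is optional, and malformed input (e.g. "ff88") yields null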
/// This function is called when the transfer function texture on the GPU should be
/// updated. Whether the transfer function values are computed here or just retrieved
/// from somewhere else is up to the implementation.
///
/// @param gl the OpenGL context
/// @param transferFunction the texture object that is updated by this function
function updateTransferFunction(gl, transferFunction) {
// Create a new array that holds the values for the transfer function. The width of 256
// is also hard-coded further down where the transferFunctionTexture OpenGL object is
// created, so changing it here requires changing it there as well. The length is
// multiplied by 4 as each pixel stores RGBA. The texture was created with the
// UNSIGNED_BYTE type, so every value has to be in [0, 255]; OpenGL maps these values
// to [0, 1] when the texture is sampled. At the end of this function, 'data' contains
// the transfer function with the values stored sequentially (RGBA,RGBA,RGBA,...) for
// all 256 pixels
const hexColor1 = document.querySelector('input[name="color1"]').value;
const hexColor2 = document.querySelector('input[name="color2"]').value;
const hexColor3 = document.querySelector('input[name="color3"]').value;
const colorRGB1 = hexToRgb(hexColor1);
const colorRGB2 = hexToRgb(hexColor2);
const colorRGB3 = hexToRgb(hexColor3);
const range1 = Math.round(document.querySelector('input[name="range1"]').value);
const range2 = Math.round(document.querySelector('input[name="range2"]').value);
const range3 = Math.round(document.querySelector('input[name="range3"]').value);
const start1 = Math.round(document.querySelector('input[name="start1"]').value);
const start2 = Math.round(document.querySelector('input[name="start2"]').value);
const start3 = Math.round(document.querySelector('input[name="start3"]').value);
const alpha1 = document.querySelector('input[name="alpha1"]').value;
const alpha2 = document.querySelector('input[name="alpha2"]').value;
const alpha3 = document.querySelector('input[name="alpha3"]').value;
let data = new Uint8Array(256 * 4);
////////////////////////////////////////////////////////////////////////////////////////
/// Beginning of the transfer function specification
// The transfer function is assembled from three user-controlled ranges, each with its
// own color and alpha value taken from the inputs above. The originally provided
// function, a simple ramp with the first 50 values set to 0 to reduce noise in the
// image, is kept below in commented-out form for reference
// Fill three user-controlled ranges of the transfer function, one per color input.
// The start/range slider values are interpreted as pixel indices in [0, 255], so
// they are multiplied by 4 to address the RGBA bytes of each pixel and keep the
// writes aligned to pixel boundaries
for (let i = start1 * 4; i < (range1 + start1) * 4; i += 4) {
data[i] = colorRGB1.r;
data[i + 1] = colorRGB1.g;
data[i + 2] = colorRGB1.b;
data[i + 3] = alpha1;
}
for (let i = start2 * 4; i < (range2 + start2) * 4; i += 4) {
data[i] = colorRGB2.r;
data[i + 1] = colorRGB2.g;
data[i + 2] = colorRGB2.b;
data[i + 3] = alpha2;
}
for (let i = start3 * 4; i < (range3 + start3) * 4; i += 4) {
data[i] = colorRGB3.r;
data[i + 1] = colorRGB3.g;
data[i + 2] = colorRGB3.b;
data[i + 3] = alpha3;
}
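// For example, with start1 = 10 and range1 = 20, pixels 10 through 29 of the
// 256-pixel transfer function (bytes 40 through 119 of 'data') receive color 1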
// const cutoff = 50;
// for (let i = 0; i < cutoff * 4; i += 4) {
// // Set RGBA all to 0
// data[i] = 0;
// data[i + 1] = 0;
// data[i + 2] = 0;
// data[i + 3] = 0;
// }
// // For now, just create a linear ramp from 0 to 1. We start at the cutoff value and fill
// // the rest of the array
// for (let i = cutoff * 4; i < 256 * 4; i += 4) {
// // convert i into a value [0, 256] and set it
// const it = i / 4;
// data[i] = 2 * it;
// data[i + 1] = it;
// data[i + 2] = 3 * it;
// data[i + 3] = it;
// }
/// End of the transfer function specification
////////////////////////////////////////////////////////////////////////////////////////
// Upload the new data to the texture
console.log(117, "Updating the transfer function texture");
gl.bindTexture(gl.TEXTURE_2D, transferFunction);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 256, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
}
/// Function that renders the bounding box using the provided model and view matrices. The
/// front-facing triangles are rendered into the framebuffer provided at 'fbFront', the
/// back-facing triangles will be rendered into the 'fbBack' framebuffer using the shader
/// program 'program'.
///
/// @param gl the OpenGL context
/// @param program the Program Object that is used to render the bounding box
/// @param modelMatrix the matrix that encodes the deformation of the bounding box that
///        should be done to accommodate non-cube volumetric datasets
/// @param viewMatrix the matrix that encodes the location of the camera
/// @param projectionMatrix the projection matrix that encodes the camera parameters
/// @param fbFront the Framebuffer to which the front side of the bounding box is rendered
/// @param fbBack the Framebuffer to which the back side of the bounding box is rendered
function renderBoundingbox(gl, program, modelMatrix, viewMatrix, projectionMatrix,
fbFront, fbBack)
{
//
// Initial setup common for both the front and back side
//
gl.enable(gl.CULL_FACE);
// Set the matrices used for the perspective rendering of the bounding box
gl.useProgram(program);
{
const location = gl.getUniformLocation(program, "modelMatrix");
gl.uniformMatrix4fv(location, false, modelMatrix);
}
{
const location = gl.getUniformLocation(program, "viewMatrix");
gl.uniformMatrix4fv(location, false, viewMatrix);
}
{
const location = gl.getUniformLocation(program, "projectionMatrix");
gl.uniformMatrix4fv(location, false, projectionMatrix);
}
//
// First render the back side
//
// Bind the back framebuffer as the target to modify
gl.bindFramebuffer(gl.FRAMEBUFFER, fbBack);
// Clear the color
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// We are rendering the back side of the bounding box, so we want to cull the
// front-facing triangles
gl.cullFace(gl.FRONT);
// Our bounding box consists of 36 vertices, so we pass that number here. All of the
// vertex positions are hardcoded in the shader, so there is no need to get into the
// vertex buffer shizzle over here
gl.drawArrays(gl.TRIANGLES, 0, 36);
//
// Then render the front side
//
// Bind the front framebuffer as the target to modify
gl.bindFramebuffer(gl.FRAMEBUFFER, fbFront);
// Clear the color
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// We are rendering the front side of the bounding box, so we want to cull the
// back-facing triangles
gl.cullFace(gl.BACK);
// Our bounding box consists of 36 vertices, so we pass that number here. All of the
// vertex positions are hardcoded in the shader, so there is no need to get into the
// vertex buffer shizzle over here
gl.drawArrays(gl.TRIANGLES, 0, 36);
}
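// The net effect of the two passes: since the fragment shader writes the normalized
// object-space position as the color, the front-face pass stores the per-pixel ray
// entry points and the back-face pass the exit points, which the raycasting shader
// then looks up through its 'st' texture coordinate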
/// This function renders the volume into the main framebuffer.
/// @param gl the OpenGL context
/// @param program the volume rendering program object that is executed to raycast
/// @param volumeTexture the texture object that contains the volume data to render
/// @param entryTexture the texture object that contains the entry point texture to
/// determine the entry point for each ray of the image
/// @param exitTexture the texture object that contains the exit point texture to
/// determine the exit point for each ray of the image
/// @param transferFunctionTexture the texture object that contains the transfer function
/// that should be used in the direct volume rendering
/// @param stepSize the step size between samples along the ray as determined by the user
/// @param compositingType the kind of compositing that should be used during the
/// rendering. Might be 'ftb' for front-to-back compositing, 'fhp' for the first
/// hit point compositing, or 'mip' for maximum-intensity projection compositing
/// @param renderType the type of rendering that we want to create on the screen. This
/// value is mainly used for debugging purposes and might be 'volume' if the direct
/// volume rendering is desired, 'entry' if only the entry points should be shown,
/// 'exit' if the exit points should be rendered, 'direction' if the absolute value
/// of the ray direction for each pixel is desired, 'transfer' to show a
/// representation of the transfer function, 'slice' to show a single grayscale
/// slice of the volume, and finally 'slice-transfer' to show a single slide of the
/// volume but with the transfer function applied to each pixel
function renderVolume(gl, program, volumeTexture, entryTexture, exitTexture,
transferFunctionTexture, stepSize, compositingType, renderType)
{
// Change the framebuffer to the browser-provided one
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
// Clear the color
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Bind the program used to render the volume
gl.useProgram(program);
// Bind the entry point texture
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, entryTexture);
gl.uniform1i(gl.getUniformLocation(program, "entryPoints"), 0); // 0 == gl.TEXTURE0
// Bind the exit point texture
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, exitTexture);
gl.uniform1i(gl.getUniformLocation(program, "exitPoints"), 1); // 1 == gl.TEXTURE1
// Bind the volume texture
gl.activeTexture(gl.TEXTURE2);
gl.bindTexture(gl.TEXTURE_3D, volumeTexture);
gl.uniform1i(gl.getUniformLocation(program, "volume"), 2); // 2 == gl.TEXTURE2
// Bind the transfer function
gl.activeTexture(gl.TEXTURE3);
gl.bindTexture(gl.TEXTURE_2D, transferFunctionTexture);
gl.uniform1i(gl.getUniformLocation(program, "transferFunction"), 3); // 3 == gl.TEXTURE3
gl.uniform1f(gl.getUniformLocation(program, "stepSize"), stepSize);
// This if-statement has to be synchronized with the HTML radio button values as the
// strings here are the same as the values in those radio buttons. If new values are
// added here, the volume rendering shader has to be updated as well
if (renderType == "volume") {
gl.uniform1i(gl.getUniformLocation(program, "renderType"), 0);
}
else if (renderType == "entry") {
gl.uniform1i(gl.getUniformLocation(program, "renderType"), 1);
}
else if (renderType == "exit") {
gl.uniform1i(gl.getUniformLocation(program, "renderType"), 2);
}
else if (renderType == "direction") {
gl.uniform1i(gl.getUniformLocation(program, "renderType"), 3);
}
else if (renderType == "transfer") {
gl.uniform1i(gl.getUniformLocation(program, "renderType"), 4);
}
else if (renderType == "slice") {
gl.uniform1i(gl.getUniformLocation(program, "renderType"), 5);
}
else if (renderType == "slice-transfer") {
gl.uniform1i(gl.getUniformLocation(program, "renderType"), 6);
}
// This if statement has to be synchronized with the HTML radio button values as the
// strings here are the same as the values in those radio buttons. If new values are
// added here, the volume rendering shader has to be updated as well
if (compositingType == "ftb") {
gl.uniform1i(gl.getUniformLocation(program, "compositingMethod"), 0);
}
else if (compositingType == "fhp") {
gl.uniform1i(gl.getUniformLocation(program, "compositingMethod"), 1);
}
else if (compositingType == "mip") {
gl.uniform1i(gl.getUniformLocation(program, "compositingMethod"), 2);
}
// Trigger the volume rendering by rendering the 2 triangles (= 6 vertices) that
// comprise the screen-aligned quad. The positions of the vertices are hard-coded in
// the shader, so there is no need to worry about vertex buffer objects
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
/// Main function to be executed after the page has been loaded. This will cause the
/// loading of the assets and trigger the render loop that will continuously update the
/// rendering. This function is marked as async as our 'fetch' request to get the volume
/// data requires this.
async function main() {
// Get the canvas object from the main document
let canvas = document.querySelector("#glCanvas");
// Get a WebGL 2.0 context from the canvas
let gl = canvas.getContext("webgl2");
// WebGL 2 is not supported by all browsers
if (!gl) {
postError("Error initializing WebGL2 context");
return;
}
console.log(100, "WebGL2 canvas created successfully");
//
// Initialize the shader programs
//
console.log(101, "Creating bounding box OpenGL program object");
const boundingBoxProgram = createProgram(
gl, boundingBoxVertexSource, boundingBoxFragmentSource
);
console.log(102, "Creating volume rendering OpenGL program object");
const volumeRenderingProgram = createProgram(
gl, volumeRenderingVertexSource, volumeRenderingFragmentSource
);
// If either program failed to compile or link, it has already printed an error
if (!boundingBoxProgram || !volumeRenderingProgram) {
return;
}
console.log(103, "Both program objects were created successfully");
//
// General setup for the OpenGL context
//
// Set the clear color to black with full opaqueness
gl.clearColor(0.0, 0.0, 0.0, 1.0);
//
// Creating intermediate framebuffer
//
// We create two framebuffers to render the entry and exit points into so that they are
// available during the volume rendering
console.log(104, "Creating the entry point texture");
// Create a new OpenGL texture
const entryTexture = gl.createTexture();
// Bind this texture as the currently active 2D texture
gl.bindTexture(gl.TEXTURE_2D, entryTexture);
// Allocate the space for the texture; passing 'null' as the data reserves the
// storage without uploading any initial contents
gl.texImage2D(
gl.TEXTURE_2D, // texture type
0, // mip-map level
gl.RGB, // internal format
canvas.width, // texture width
canvas.height, // texture height
0, // border value
gl.RGB, // format
gl.UNSIGNED_BYTE, // type
null // data
);
// We want to do linear interpolation with this texture
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
// It shouldn't happen that we are accessing the texture outside its range, but these
// values need to be set in order to make OpenGL happy
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
console.log(105, "Creating the entry point frame buffer object");
// Create the actual framebuffer for the entry points
const entryFramebuffer = gl.createFramebuffer();
// Make this framebuffer the currently active one
gl.bindFramebuffer(gl.FRAMEBUFFER, entryFramebuffer);
// Attach the previously created texture to this framebuffer
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0, // attachment position in the framebuffer
gl.TEXTURE_2D, // texture type
entryTexture, // target texture
0 // mip-map level at which to attach the texture
);
console.log(106, "Creating the exit point texture");
// Now do the same thing for the exit texture
const exitTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, exitTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, canvas.width, canvas.height, 0, gl.RGB,
gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
console.log(107, "Creating the exit point frame buffer");
const exitFramebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, exitFramebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D,
exitTexture, 0);
// Reset the framebuffer to the browser-provided one just in case
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
//
// Create the texture that holds the transfer function
//
console.log(108, "Creating the transfer function texture");
const transferFunctionTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, transferFunctionTexture);
// We hard-code the resolution of the transfer function to 256 pixels
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 256, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
// Set the texture parameters that are required by OpenGL
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
//
// Load the volume data
//
console.log(109, "Creating the texture holding the volume");
const volumeTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_3D, volumeTexture);
let modelMatrix = mat4.create();
{
// Download the pig.raw file from the local server
console.log(110, "Downloading 'pig.raw' from 'localhost'");
let response;
try {
response = await fetch('pig.raw');
}
catch (error) {
// Something went wrong, so we have to bail
postError("Error accessing volume 'pig.raw': " + error);
return;
}
if (!response.ok) {
// The fetch request didn't fail catastrophically, but it still didn't succeed
postError("Error accessing volume 'pig.raw'");
return;
}
// Access the response payload as a binary data blob
console.log(111, "Accessing the blob data from the response");
const blob = await response.blob();
// Convert it into an array buffer
console.log(112, "Cast the blob into an array buffer");
const data = await blob.arrayBuffer();
// From the available metadata for the pig.raw dataset, I know that each voxel is a
// 16-bit unsigned integer
console.log(113, "Cast the array buffer into a Uint16 typed array");
const typedData = new Uint16Array(data);
// Our volume renderer really likes 8 bit data, so let's convert it
console.log(114, "Convert the array into a Uint8 array");
const convertedData = new Uint8Array(typedData.length);
for (let i = 0; i < typedData.length; i++) {
// The range of the dataset is [0, 4096), so we need to convert that into
// [0, 256) manually
convertedData[i] = typedData[i] / 4096.0 * 256.0;
}
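// For example, the maximum raw value 4095 maps to 4095 / 4096 * 256 = 255.9375,
// which is truncated to 255 when stored in the Uint8Array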
// The volume size also comes from the available metadata for pig.raw; if you want to
// have a look at the metadata, the accompanying pig.dat file contains it
const volumeSize = [ 512, 512, 134 ];
console.log(115, "Upload the volume to the GPU");
gl.texImage3D(
gl.TEXTURE_3D, // 3D texture -> volume
0, // the mipmap level, still 0
gl.R8, // the texture should only have a single component
volumeSize[0], // x dimension of the volume
volumeSize[1], // y dimension of the volume
volumeSize[2], // z dimension of the volume
0, // value used for the border voxels
gl.RED, // only a single component, and that is Red
gl.UNSIGNED_BYTE, // each voxel is represented by a single unsigned byte
convertedData // the volume data
);
// Compute the model matrix for this data set. These values are all also part of the
// meta data for this data set (see the .dat file, if you are interested in them)
mat4.rotate(modelMatrix, modelMatrix, 3 * Math.PI / 2, [1.0, 0.0, 0.0]);
mat4.scale(modelMatrix, modelMatrix, [1.0, 1.0, 0.7052631578947368]);
}
// We need to specify these parameters to make the texture well formed in the eyes of
// OpenGL; compared to the 2D textures above, we now also have to specify the wrapping
// behavior of the R component (the third texture dimension)
gl.texParameteri(gl.TEXTURE_3D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_3D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_3D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_3D, gl.TEXTURE_WRAP_R, gl.CLAMP_TO_EDGE);
// Create the projection matrix
let projectionMatrix = mat4.create();
{
const fieldOfView = 45 * 2.0 * Math.PI / 360.0; // 45 degrees in radians
const aspectRatio = gl.canvas.clientWidth / gl.canvas.clientHeight; // assuming > 1.0
const zNear = 0.1; // Near clipping plane
const zFar = 100.0; // Far clipping plane
// Use a convenience method to create a perspective matrix for the bounding box
mat4.perspective(projectionMatrix, fieldOfView, aspectRatio, zNear, zFar);
}
//
// Rendering
//
function internalRender() {
// If for whatever reason the transfer function is dirty, create the new data
// representation and upload to the texture
if (TransferFunctionIsDirty) {
updateTransferFunction(gl, transferFunctionTexture);
TransferFunctionIsDirty = false;
// If we update the transfer function, that also implies that the rendering has
// changed
RenderingIsDirty = true;
}
// If the rendering is not dirty, nothing has changed since the last animation frame
// that would warrant a rerender, so we just queue ourselves up again and yield
if (!RenderingIsDirty) {
requestAnimationFrame(internalRender);
return;
}
let viewMatrix = mat4.create();
{
// Compute the camera location by converting spherical coordinates into Cartesian
const r = document.getElementById("camera-r").value / 10.0;
const phi = document.getElementById("camera-phi").value;