Image processing with WebGL.
window.onload = function() {
    // Send the picture to WebGL as a texture. To do the same with video or a
    // webcam, change store_picture to store_video / store_webcam (see the sketch below).
    var storageId = numgl.store_picture("imgElementID");
    numgl.show_texture(storageId);
};
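A minimal sketch of the video variant. It assumes store_video mirrors store_picture and takes the ID of a <video> element; the exact signatures of store_video and store_webcam are not shown in this document.

window.onload = function() {
    // Assumed to mirror store_picture: pass the ID of the <video> element.
    var videoId = numgl.store_video("videoElementID");
    numgl.show_texture(videoId);
    // For a webcam stream, store_webcam would be used instead.
};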
// Convert the stored picture to greyscale.
numgl.grey(storageId);
Fiddle with it:
// [-1,-1,-1,0,0,0,1,1,1] is a 3x3 kernel in flattened row-major order:
// 1st row [-1,-1,-1], 2nd row [0,0,0], 3rd row [1,1,1].
numgl.convolution(storageId, [-1,-1,-1,0,0,0,1,1,1]);
// Other kernels are supported; another example is shown below.
JS Fiddle with it:
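For instance, a standard 3x3 sharpen kernel can be passed in the same flattened row-major form (whether the library normalises kernels internally is not stated here):

// Sharpen kernel:
//  [ 0, -1,  0,
//   -1,  5, -1,
//    0, -1,  0 ]
numgl.convolution(storageId, [0,-1,0,-1,5,-1,0,-1,0]);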
// 80 is the threshold value.
numgl.threshold(storageId, 80);
JS Fiddle:
window.onload = function() {
    var imageId = numgl.store_picture("image");
    numgl.show_canvas(imageId);
    // Convolution followed by threshold.
    var convResult = numgl.convolution(imageId, [-1,-1,-1,0,0,0,1,1,1]);
    numgl.threshold(convResult, 10);
    numgl.do_it();
};
JS Fiddle:
// If this is not set, the FPS counter is not shown.
numgl.set_fps_element(fpsElementId);
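A sketch of wiring the counter up to a continuously redrawn source. It assumes set_fps_element takes the ID of a DOM element and that store_video mirrors store_picture; both argument shapes are assumptions here.

window.onload = function() {
    // "fps" is assumed to be the ID of an element (e.g. a <span>) that will show the frame rate.
    numgl.set_fps_element("fps");
    var videoId = numgl.store_video("videoElementID"); // assumed to mirror store_picture
    numgl.show_canvas(videoId);
    numgl.grey(videoId);
    numgl.do_it();
};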
// Draws the following 2x2 RGBA texture:
// [white, black,
//  black, white]
// numgl.store_array()'s last two arguments are the width and height.
var arrayId = numgl.store_array([255,255,255,255,
                                 0,0,0,255,
                                 0,0,0,255,
                                 255,255,255,255],
                                2, 2);
// show_texture() calls do_it() internally.
numgl.show_texture(arrayId);
// If no width and height are specified, read_canvas() will read the whole canvas.
console.log(numgl.read_canvas().toString());
// Using the above example, the result would be: "255,255,255,255,0,0,0,255,0,0,0,255,255,255,255,255"
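As a rough sanity check against the checkerboard stored above (assuming read_canvas() returns an indexable array of pixel values, as the toString() output suggests):

var expected = [255,255,255,255, 0,0,0,255, 0,0,0,255, 255,255,255,255];
var pixels = numgl.read_canvas();
var matches = expected.every(function(value, i) { return pixels[i] === value; });
console.log(matches ? "checkerboard rendered as expected" : "unexpected pixel data");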
// Must be set before numgl.do_it() is called.
numgl.textures[storageId].flipTexture = true; // or false
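A minimal sketch of using the flag; its exact effect (presumably the vertical flip applied when the texture is uploaded) is an assumption here. Since show_texture() calls do_it() internally, the flag is set first.

window.onload = function() {
    var imageId = numgl.store_picture("image");
    // Set the flag before anything triggers do_it().
    numgl.textures[imageId].flipTexture = true;
    numgl.show_texture(imageId);
};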
// Paint the screen blue.
numgl.fragColor = "vec4(0, 0, 1, 1);";
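A sketch of using the override. It assumes a source has already been stored so that do_it() has something to draw, and that the string is injected verbatim as the shader's output colour.

window.onload = function() {
    // A 1x1 texture is stored only so the pipeline is initialised (assumption).
    var arrayId = numgl.store_array([255,255,255,255], 1, 1);
    numgl.fragColor = "vec4(0, 0, 1, 1);";
    numgl.do_it();
};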
// Log the generated fragment shader code; vs_code() returns the vertex shader code.
console.log(numgl.fs_code("pretty"));
Video and webcam sources use requestAnimationFrame, so their frames are drawn repeatedly; pictures are drawn only once.
The JavaScript functions generate GLSL code; when the user calls numgl.do_it(), that GLSL code is compiled and executed. For example:
window.onload = function() {
    // "image" is the <img> tag ID.
    var imageId = numgl.store_picture("image");
    numgl.grey(imageId);
    // Log the GLSL code generated by the numgl.grey() call:
    console.log(numgl.fs_code("pretty"));
};
The resulting GLSL code (uTexture0 is the stored texture; it can be an array, a picture, or a video frame):
precision highp float;
uniform vec2 uResolution;
uniform sampler2D uTexture0;
uniform vec2 uTextureSize0;
varying vec2 vTextCoords;
void main(void) {
    float float_0 = 0.2126 * texture2D(uTexture0, vTextCoords).r;
    float float_1 = 0.7152 * texture2D(uTexture0, vTextCoords).g;
    float float_2 = 0.0722 * texture2D(uTexture0, vTextCoords).b;
    float float_3 = float_0 + float_2 + float_1;
    vec4 vec4_0 = vec4(float_3,float_3,float_3,1);
    gl_FragColor = vec4_0;
}
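In other words, chaining more calls only extends the generated shader; nothing runs on the GPU until do_it(). A sketch reusing the convolution-plus-threshold example from above:

window.onload = function() {
    var imageId = numgl.store_picture("image");
    var convResult = numgl.convolution(imageId, [-1,-1,-1,0,0,0,1,1,1]);
    numgl.threshold(convResult, 10);
    // The logged shader should now contain both the convolution and the threshold steps.
    console.log(numgl.fs_code("pretty"));
    // Compile and run the accumulated GLSL.
    numgl.do_it();
};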
For local images and video you'll need to run a local server. Open a terminal, go to your project's folder, and run python3 -m http.server 8000.
Images and video from other websites require the inline HTML attribute crossorigin="anonymous"; see the img tag here, for example.
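For example, with a hypothetical image URL, the tag would look like this:

<img id="image" crossorigin="anonymous" src="https://example.com/picture.png">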