The diffuse bed
The visualization shows the reactive state of the AI as it responds to the incoming video feed, i.e. to motion, and to the training pulse. https://glslsandbox.com/e#101349.1
PShader s;
PGraphics cnv;
int w, h;

void setup() {
  size(600, 400, P2D);
  s = loadShader("field.glsl");
  s.set("resolution", float(width), float(height));
  cnv = createGraphics(600, 400, P2D);
  noStroke();
  w = width;
  h = height;
}

void draw() {
  s.set("time", millis() / 1000.0);
  cnv.beginDraw();
  cnv.noStroke();
  cnv.shader(s); // set the shader before drawing, so the rect is rendered with it
  cnv.rect(0, 0, w, h);
  //cnv.background(222);
  cnv.endDraw();
  image(cnv, 0, 0);
}
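Since cnv is the same size as the sketch window, the offscreen buffer is not strictly required here; a minimal sketch of a buffer-free draw(), assuming the same s from setup():

// Sketch of an alternative draw(): bind the shader on the main canvas
// and draw one full-size rect for it to fill.
void draw() {
  s.set("time", millis() / 1000.0);
  shader(s);
  rect(0, 0, width, height);
}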
#extension GL_OES_standard_derivatives : enable
precision mediump float;

uniform float time;
uniform vec2 mouse;
uniform vec2 resolution;

mat2 rotate2D(float r) {
  return mat2(cos(r), sin(r), -sin(r), cos(r));
}

void main() {
  float t = time * .1;
  float brightness = (sin(t * 12.) + 1.) * .1;
  float fluffi = (sin(t * 2.) + 1.) * .1 + 1.1; // change slowly!
  float detail = 9.;
  float metaloid = 1.03; // .3 = smooth, 1.03 = liquid -- only change metaloid in hard steps, no slide

  // Centered, aspect-corrected pixel coordinates
  vec2 uv = (gl_FragCoord.xy - .5 * resolution.xy) / resolution.y;

  vec3 col = vec3(2);
  vec2 n = vec2(1);
  vec2 q = vec2(1);
  vec2 p = uv;
  float d = dot(p, p);
  float S = detail;
  float a = 0.05;
  mat2 m = rotate2D(2.2);

  for (float j = 0.; j < 11.; j++) {
    p *= m;
    n *= m;
    q = p * S + t * 4. + sin(t * 4. - d * 3.) * 2.09 + j + n; // wtf???
    a += dot(cos(q * metaloid) / S, vec2(.2));
    n -= sin(q);
    S *= fluffi;
  }

  col = vec3(7, 2, 4) * (a + brightness) + a + a;
  //col -= d; // vignette

  // Output to screen
  gl_FragColor = vec4(col, 1.0);
}
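One detail worth noting in field.glsl: GLSL mat2 constructors are column-major, so rotate2D(r) builds the standard counter-clockwise rotation matrix, but the loop multiplies as p *= m (vector times matrix), which applies the transpose, i.e. a clockwise rotation. A throwaway sanity check, assuming the rotate2D() above:

// Rotation-direction check (scratch code, not part of the shader).
void check() {
  mat2 m = rotate2D(1.5708);      // ~90 degrees
  vec2 ccw = m * vec2(1.0, 0.0);  // ~( 0.0,  1.0): counter-clockwise
  vec2 cw  = vec2(1.0, 0.0) * m;  // ~( 0.0, -1.0): what p *= m does
}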
The main setup
import gohai.glvideo.*;

GLCapture video;
PShader edge_shader;
PGraphics video_direct_buffer; // direct video frame buffer
PGraphics video_p_buffer;      // previous video frame buffer
PGraphics diffBuffA;
PGraphics canvas;              // shader output buffer
PGraphics ebuff;               // small analysis buffer

int sw = 640;
int sh = 480;
int cw = 320;
int ch = 240;
int bw = 80;
int bh = 60;
float p_brisum = 0;
float sbs = 0;
float smdiff = 0;

void setup() {
  size(640, 480, P2D);
  initCam();
  initShaders();
  noCursor();
  edge_shader = loadShader("shader_one.glsl");
  edge_shader.set("iResolution", float(cw), float(ch));
  frameRate(25);
}

void draw() {
  //background(122);
  if (video.available()) {
    video.read();
    video.updatePixels();
    video_direct_buffer.beginDraw();
    video_direct_buffer.image(video, 0, 0);
    video_direct_buffer.endDraw();
  }

  edge_shader.set("iChannel0", video_direct_buffer);
  edge_shader.set("iChannel1", video_p_buffer);

  // run the difference shader over the canvas buffer
  canvas.beginDraw();
  canvas.rect(0, 0, 22, 22);
  canvas.filter(edge_shader);
  canvas.endDraw();

  // store the current frame as the previous frame for the next pass
  video_p_buffer.beginDraw();
  video_p_buffer.background(222);
  video_p_buffer.image(video_direct_buffer, 0, 0);
  video_p_buffer.endDraw();

  // downscale the shader output for cheap pixel analysis
  ebuff.beginDraw();
  ebuff.rect(0, 0, 22, 22);
  ebuff.image(canvas, 0, 0, bw, bh);
  ebuff.endDraw();

  // average the luma of the difference image -> overall motion amount
  ebuff.loadPixels();
  float brisum = 0;
  for (int i = 0; i < ebuff.pixels.length; i++) {
    color cc = ebuff.pixels[i];
    float r = red(cc);
    float g = green(cc);
    float b = blue(cc);
    brisum += 0.2126 * r + 0.7152 * g + 0.0722 * b; // Rec. 709 luma
  }
  brisum /= float(ebuff.pixels.length);
  smdiff = lerp(smdiff, brisum, 0.15); // smooth the motion value

  //image(video_p_buffer, 80, 0, sw/2, sh/2);
  image(canvas, 0, 0, sw, sh);
  image(video_direct_buffer, 0, 0, sw/4, sh/4);
  //image(ebuff, 0, 160);
  fill(smdiff * 10);
  rect(80, 80, 44, 44);
}
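A note on the analysis loop: red(), green() and blue() are convenient, but per-pixel function calls are the slow way to read colors in Processing. On an 80x60 buffer it hardly matters; for larger buffers, a sketch of the same luma sum with plain bit shifts:

// Sketch of a faster luma loop: extract channels with bit shifts
// instead of red()/green()/blue().
float brisum = 0;
for (int i = 0; i < ebuff.pixels.length; i++) {
  int cc = ebuff.pixels[i];
  int r = (cc >> 16) & 0xFF;
  int g = (cc >> 8) & 0xFF;
  int b = cc & 0xFF;
  brisum += 0.2126 * r + 0.7152 * g + 0.0722 * b; // Rec. 709 luma
}
brisum /= float(ebuff.pixels.length);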
void initShaders() {
  video_direct_buffer = createGraphics(cw, ch, P2D);
  video_direct_buffer.beginDraw();
  video_direct_buffer.background(122);
  video_direct_buffer.endDraw();

  video_p_buffer = createGraphics(cw, ch, P2D);
  video_p_buffer.beginDraw();
  video_p_buffer.background(122);
  video_p_buffer.endDraw();

  canvas = createGraphics(cw, ch, P2D);
  canvas.beginDraw();
  canvas.background(122);
  canvas.endDraw();

  ebuff = createGraphics(bw, bh, P2D);
  ebuff.beginDraw();
  ebuff.background(122);
  ebuff.endDraw();
}

void initCam() {
  /*
  String[] devices = GLCapture.list();
  println("Devices:");
  if (0 < devices.length) {
    String[] configs = GLCapture.configs(devices[0]);
    println("Configs:");
    printArray(configs);
  }
  printArray(devices);
  GLCapture.configs(devices[0]);
  */
  String[] devices = GLCapture.list();
  //video = new GLCapture(this);
  video = new GLCapture(this, devices[0], cw, ch, 25);
  video.start();
}
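initCam() assumes a camera exists at devices[0] and will throw if none is attached. A hedged, slightly defensive variant of the same function:

// Sketch of a defensive initCam(): bail out cleanly when no camera is found.
void initCam() {
  String[] devices = GLCapture.list();
  if (devices.length == 0) {
    println("No capture device found");
    exit();
    return;
  }
  video = new GLCapture(this, devices[0], cw, ch, 25);
  video.start();
}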
#ifdef GL_ES
precision mediump float;
#endif

#define PROCESSING_TEXTURE_SHADER

uniform float iTime;
uniform vec2 iMouse;
uniform vec2 iResolution;
uniform sampler2D iChannel0; // direct cam feed
uniform sampler2D iChannel1; // previous-frame buffer
uniform sampler2D texture;
uniform vec2 texOffset;

varying vec4 vertColor;
varying vec4 vertTexCoord;

void mainImage( out vec4 fragColor, in vec2 fragCoord ) {
  vec2 res = iResolution.xy;
  vec2 uv = fragCoord / res;
  vec4 cam_direct = texture2D(iChannel0, uv);

  // 3x3 neighbourhood around the current texel
  vec2 tc0 = vertTexCoord.st + vec2(-texOffset.s, -texOffset.t);
  vec2 tc1 = vertTexCoord.st + vec2(         0.0, -texOffset.t);
  vec2 tc2 = vertTexCoord.st + vec2(+texOffset.s, -texOffset.t);
  vec2 tc3 = vertTexCoord.st + vec2(-texOffset.s,          0.0);
  vec2 tc4 = vertTexCoord.st + vec2(         0.0,          0.0);
  vec2 tc5 = vertTexCoord.st + vec2(+texOffset.s,          0.0);
  vec2 tc6 = vertTexCoord.st + vec2(-texOffset.s, +texOffset.t);
  vec2 tc7 = vertTexCoord.st + vec2(         0.0, +texOffset.t);
  vec2 tc8 = vertTexCoord.st + vec2(+texOffset.s, +texOffset.t);

  //texture = iChannel0;
  vec4 col0 = texture2D(iChannel0, tc0);
  vec4 col1 = texture2D(iChannel0, tc1);
  vec4 col2 = texture2D(iChannel0, tc2);
  vec4 col3 = texture2D(iChannel0, tc3);
  vec4 col4 = texture2D(iChannel0, tc4);
  vec4 col5 = texture2D(iChannel0, tc5);
  vec4 col6 = texture2D(iChannel0, tc6);
  vec4 col7 = texture2D(iChannel0, tc7);
  vec4 col8 = texture2D(iChannel0, tc8);

  // dir_edge is the direct feed converted to edges (3x3 Laplacian kernel);
  // computed but currently unused
  vec4 dir_edge = 8.0 * col4 - (col0 + col1 + col2 + col3 + col5 + col6 + col7 + col8);

  vec4 camdirin = texture2D(iChannel0, uv);
  vec4 buffer = texture2D(iChannel1, uv);
  float cam_edge_bri = dot(camdirin.xyz, vec3(0.2126, 0.7152, 0.0722));
  float buff_bri = dot(buffer.xyz, vec3(0.2126, 0.7152, 0.0722));

  //float eggi = buff_bri - cam_edge_bri;
  float eggi = abs(camdirin.r - buffer.r); // frame difference, red channel only
  fragColor = vec4(vec3(eggi * .6), 1.0);
  //fragColor = vec4(coldiff, 1.0);
}

void main(void) {
  mainImage(gl_FragColor, gl_FragCoord.xy);
}
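The shader already computes the luma of both frames (cam_edge_bri, buff_bri), and the commented-out eggi line hints at using them. A sketch of that variant, weighing all three channels instead of only red, as a drop-in for the last two active lines of mainImage():

// Luma-based frame difference (the variant the commented-out line points at);
// tends to be less noisy than the single red channel on colored motion.
float eggi = abs(cam_edge_bri - buff_bri);
fragColor = vec4(vec3(eggi * .6), 1.0);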