
Shader-based webcam filter




import processing.video.*;
 
PImage tex;
PShader deform;

Capture video;

void setup() {
  size(2000, 3000, P2D);
  //fullScreen(P2D);
  textureWrap(REPEAT);
  tex = loadImage("tex1.jpg");  // extra texture; not bound to the shader in this sketch
 
  deform = loadShader("deform.glsl");
  deform.set("resolution", float(width), float(height));
  
  // Initialize the video capture at the requested resolution
  video = new Capture(this, 1280, 720);
  
  video.start();
}

void draw() {
  deform.set("time", millis() / 1000.0);
  deform.set("mouse", random(100), float(mouseY));
  deform.set("fluffy", sin(millis()*.01)+1, float(mouseX));
  shader(deform);
  image(video, 0, 0, width, height);
}


// Event handler for video capture
void captureEvent(Capture c) {
  c.read(); // Read the new frame from the capture
}
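The sketch above opens the default camera at 1280x720 and runs deform.glsl (listed below) over every frame. If the default device is not the one you want, the Video library can enumerate the attached cameras; a minimal sketch of that, assuming at least one camera is present (the names and their order depend on the machine):

// Optional: pick a specific camera instead of the default device.
String[] cams = Capture.list();                 // names of the attached cameras
printArray(cams);                               // print them to the console
video = new Capture(this, 1280, 720, cams[0]);  // open the first one explicitly
video.start();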


#ifdef GL_ES
precision mediump float;
precision mediump int;
#endif

#define PROCESSING_TEXTURE_SHADER

uniform sampler2D texture;

uniform float time;
uniform vec2 resolution;
uniform vec2 mouse;
uniform vec2 fluffy;

void main(void) {
    vec2 p = -1.0 + 1.0 * gl_FragCoord.xy / resolution.xy; // note the 1.0 (not the usual 2.0): p spans [-1, 0]
    p.y *= -1.0;

    // Mirror on the x and y axes
    p = abs(p * 2.0);

    // Ripple effect (the computed delta is currently not applied to p)
    vec2 uv = gl_FragCoord.xy / resolution.xy;
    vec2 fromMid = uv - vec2(0.5, 0.5);
    float dFromMid = length(fromMid);
    float sound = 0.2; // could instead be driven by fluffy.x * 0.01 or by an audio texture
    // sound += 0.025;

    vec2 delta = fromMid * -0.5 * sound * (1.0 + sin(90.0 * dFromMid - 15.0)) * mouse.x * 0.1;
    // p += delta * 0.01;

    // p.x += sin(p.y * 0.02 + time * 0.01) * 0.01;
    vec3 col = texture2D(texture, p).xyz;

    // Apply sinusoidal distortion to the color channels
    col.r = col.r * sin(p.y * 2.51 + time);
    col.g = col.g * sin(p.y * 4.7 + time + 1.0);        // offset phase for green channel
    col.b = col.b * sin(p.x * 9.99 + time * 5.0 + 2.0); // offset phase for blue channel

    col.b *= sin(col.b * 32.0);

    gl_FragColor = vec4(col, 1.0);
}
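The image tex ("tex1.jpg") is loaded in the first sketch but never handed to the shader, so the filter only samples the webcam frame. If you want the shader to blend that texture in as well, one option is to bind it as a second sampler; a hedged sketch, assuming a uniform named tex1 and that tex1.jpg sits in the sketch's data folder:

// In setup(), after loadImage() and loadShader():
deform.set("tex1", tex);  // bind the PImage to a sampler2D uniform named "tex1"

// In deform.glsl, declare and sample the extra texture:
// uniform sampler2D tex1;
// vec3 overlay = texture2D(tex1, p).rgb;
// col = mix(col, overlay, 0.5);  // example blend; the 0.5 weight is arbitrary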

RIPPLE: ripple variant of the shader, followed by its matching sketch



#ifdef GL_ES
precision mediump float;
precision mediump int;
#endif

#define PROCESSING_TEXTURE_SHADER

uniform sampler2D texture;

uniform float time;
uniform vec2 resolution;
uniform vec2 mouse;
uniform vec2 fluffy;

void main(void) {
    vec2 p = -1.0 + 1.0 * gl_FragCoord.xy / resolution.xy;
    // p.y *= -1.0;
    p.y *= 3.0;

    // Mirror the coordinates on both x and y axes
    p = fract(p * 2.0) - 0.5;
    p = abs(p);

    // Ripple effect
    vec2 uv = gl_FragCoord.xy / resolution.xy;
    vec2 fromMid = uv - vec2(0.5, 0.5);
    float dFromMid = length(fromMid);
    float sound = 0.2; // kept from the first shader; unused in this variant

    // Ripple parameters
    float frequency = 10.0 + mouse.x * 80.0; // more ripples as the mouse moves right
    float amplitude = 0.01 + mouse.y * 0.72; // stronger ripples as the mouse moves down
    float speed = 0.75;                      // speed of the ripple animation

    // Calculate ripple distortion
    vec2 ripple = fromMid * amplitude * sin(frequency * dFromMid - speed * time);
    p += ripple;

    // Sample the webcam texture at the rippled coordinates
    vec3 col = texture2D(texture, p).xyz;

    // Per-channel sinusoidal distortions, commented out in this variant:
    // col.r = col.r * sin(p.y * 2.51 + time);
    // col.g = col.g * sin(p.y * 4.7 + time + 1.0);        // offset phase for green channel
    // col.b = col.b * sin(p.x * 9.99 + time * 5.0 + 2.0); // offset phase for blue channel
    // col.b *= (sin(col.b * 32.0 * p.y) + 1.0) * 0.5;
    // col.r *= (sin(col.r * 32.0 * p.x) + 1.0) * 0.5;

    col.r *= (sin(ripple.x * 2.0 + p.y * 4.0) + 1.0) * 2.5;

    gl_FragColor = vec4(col, 1.0);
}
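The matching sketch below drives frequency and amplitude with the raw, normalized cursor position, so the ripple can jump when the mouse moves quickly. One way to soften that is to ease the values sent to the mouse uniform toward the cursor each frame; a sketch using Processing's lerp(), where the 0.05 easing factor is an arbitrary choice:

float mx = 0.0, my = 0.0;  // smoothed, normalized mouse position

void updateMouseUniform() {
  mx = lerp(mx, mouseX / float(width), 0.05);   // ease toward the current cursor x
  my = lerp(my, mouseY / float(height), 0.05);  // ease toward the current cursor y
  deform.set("mouse", mx, my);
}

Calling updateMouseUniform() from draw() in place of the direct deform.set("mouse", ...) keeps the same uniform interface but smooths the motion.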





import processing.video.*;
 
PImage tex;  // declared but unused in this variant
PShader deform;

Capture video;

void setup() {
  size(720, 480, P2D);
  //fullScreen(P2D);
  textureWrap(REPEAT);
  
 
  deform = loadShader("deform.glsl");
  deform.set("resolution", float(width), float(height));
  
  // Initialize the video capture at the requested resolution
  video = new Capture(this, 640, 480);
  
  video.start();
}

void draw() {
  deform.set("time", millis() / 1000.0);
  deform.set("mouse", float(mouseX)/float(width), float(mouseY)/float(height));
  deform.set("fluffy", sin(millis()*.01)+1, float(mouseX));
  shader(deform);
  image(video, 0, 0, width, height);
}


// Event handler for video capture
void captureEvent(Capture c) {
  c.read(); // Read the new frame from the capture
}
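Both fragment shaders declare the same uniforms (texture, time, resolution, mouse, fluffy), so one sketch can switch between them at runtime. A hedged example, assuming the two variants are saved as deform.glsl and ripple.glsl in the data folder:

PShader deformShader, rippleShader;

// In setup():
//   deformShader = loadShader("deform.glsl");
//   rippleShader = loadShader("ripple.glsl");
//   deform = deformShader;

void keyPressed() {
  if (key == '1') deform = deformShader;  // color-warp variant
  if (key == '2') deform = rippleShader;  // ripple variant
  deform.set("resolution", float(width), float(height));  // re-send the static uniform
}

draw() already sets time, mouse, and fluffy on deform every frame, so whichever shader is active picks them up automatically.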