<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Webcam Slit-Scanner</title>
<script src="https://cdn.tailwindcss.com"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
<style>
body, html {
margin: 0;
padding: 0;
overflow: hidden;
background-color: #000;
font-family: 'Inter', sans-serif;
color: white;
}
canvas {
display: block;
width: 100vw;
height: 100vh;
}
</style>
</head>
<body class="bg-gray-900 text-white">
<div id="ui-container" class="absolute top-4 left-4 z-10 flex flex-col items-start space-y-2 p-4 bg-gray-800 bg-opacity-70 rounded-xl shadow-lg">
<h1 class="text-xl font-bold">Webcam Slit-Scanner</h1>
<div id="sliders-container" class="mt-4 w-full space-y-4">
<!-- Sliders are added here dynamically -->
</div>
<div id="status-message" class="text-sm text-gray-300 mt-2">Warte auf Kamerazugriff...</div>
</div>
<div id="webgl-container"></div>
<script>
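// Architecture: a classic two-target ("ping-pong") feedback loop.
// 1. The slit-scan pass renders into writeBuffer, copying the previous
//    frame from readBuffer everywhere except a one-pixel horizontal slit,
//    which is filled from the live webcam texture.
// 2. A passthrough pass draws writeBuffer to the screen.
// 3. readBuffer and writeBuffer are swapped for the next frame.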
// Fragment shader for the slit-scan effect (buffer pass)
const slitScanFragmentShader = `
uniform vec2 iResolution;
uniform sampler2D iChannel0; // Previous frame (readBuffer)
uniform sampler2D iChannel1; // Webcam feed (videoTexture)
uniform float iTime;
uniform float u_speed;
uniform float u_delay;
void main() {
// Screen-space UV and the vertical position of the moving slit.
// slitpos wraps at (height + u_delay): the extra u_delay pixels keep the
// slit off-screen for a while, creating a pause between sweeps.
vec2 uv = gl_FragCoord.xy / iResolution.xy;
float slitpos = mod(iTime * u_speed, iResolution.y + u_delay);
// Sample the previous frame
vec3 last = texture2D(iChannel0, uv).rgb;
vec3 col = last;
// Draw the one-pixel-wide slit: 1.0 where slitpos - 1.0 < gl_FragCoord.y <= slitpos.
float slit = step(gl_FragCoord.y, slitpos) - step(gl_FragCoord.y, slitpos - 1.0);
col = mix(col, texture2D(iChannel1, uv).rgb, slit);
gl_FragColor = vec4(col, 1.0);
}
`;
// Fragment shader for the final render to the screen (simple passthrough)
const screenPassFragmentShader = `
uniform vec2 iResolution;
uniform sampler2D iChannel0;
void main() {
vec2 uv = gl_FragCoord.xy / iResolution.xy;
gl_FragColor = texture2D(iChannel0, uv);
}
`;
// Simple vertex shader shared by both materials
const vertexShader = `
void main() {
gl_Position = vec4(position, 1.0);
}
`;
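// The vertex shader emits positions directly in clip space, so the
// PlaneGeometry(2, 2) quad always covers the full viewport and the
// orthographic camera's transform is effectively bypassed.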
const uniforms = {
u_speed: { label: "Speed", value: 50.0, min: 10.0, max: 200.0, step: 1.0 },
u_delay: { label: "Delay", value: 120.0, min: 0.0, max: 200.0, step: 1.0 }
};
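// Sliders are generated from this table, so a new control only needs an entry
// here plus a matching uniform in the shader. As a sketch, a hypothetical
// slit-width control could look like:
//   u_width: { label: "Slit width", value: 1.0, min: 1.0, max: 20.0, step: 1.0 }
// paired with `uniform float u_width;` in slitScanFragmentShader.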
let camera, scene, renderer, videoTexture;
let video = document.createElement('video');
let isCameraReady = false;
// Ping-pong buffers for the feedback loop
let readBuffer, writeBuffer;
// Materials for the two render passes
let slitScanMaterial, screenPassMaterial;
const container = document.getElementById('webgl-container');
const statusMessage = document.getElementById('status-message');
const slidersContainer = document.getElementById('sliders-container');
function createSliders() {
slidersContainer.innerHTML = '';
for (const name in uniforms) {
const uniform = uniforms[name];
const sliderGroup = document.createElement('div');
sliderGroup.className = 'w-full';
sliderGroup.innerHTML = `
<label for="${name}" class="text-sm font-medium block">${uniform.label}</label>
<div class="flex items-center space-x-2 mt-1">
<input type="range" id="${name}" name="${name}" min="${uniform.min}" max="${uniform.max}" step="${uniform.step}" value="${uniform.value}"
class="w-full h-2 bg-gray-600 rounded-lg appearance-none cursor-pointer">
<span id="${name}-value" class="w-12 text-sm text-center font-mono">${uniform.value}</span>
</div>
`;
slidersContainer.appendChild(sliderGroup);
const slider = document.getElementById(name);
const valueSpan = document.getElementById(`${name}-value`);
slider.addEventListener('input', (event) => {
const newValue = parseFloat(event.target.value);
uniform.value = newValue;
valueSpan.textContent = newValue.toFixed(0); // Both sliders use integer steps.
});
}
}
function init() {
scene = new THREE.Scene();
camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0.1, 100);
camera.position.z = 1;
renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
container.appendChild(renderer.domElement);
const options = {
    minFilter: THREE.LinearFilter,
    magFilter: THREE.LinearFilter,
    format: THREE.RGBAFormat,
    // Only 8-bit camera colors are accumulated, so the default
    // THREE.UnsignedByteType would also work on devices without
    // float-render support.
    type: THREE.FloatType
};
readBuffer = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight, options);
writeBuffer = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight, options);
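// Two targets are needed because WebGL cannot sample a texture that is bound
// as the current render target: each frame reads from one buffer and writes
// into the other, and the two are swapped afterwards.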
slitScanMaterial = new THREE.ShaderMaterial({
uniforms: {
iResolution: { value: new THREE.Vector2(window.innerWidth, window.innerHeight) },
iChannel0: { value: null },
iChannel1: { value: null },
iTime: { value: 0.0 },
u_speed: { value: uniforms.u_speed.value },
u_delay: { value: uniforms.u_delay.value }
},
vertexShader: vertexShader,
fragmentShader: slitScanFragmentShader
});
screenPassMaterial = new THREE.ShaderMaterial({
uniforms: {
iResolution: { value: new THREE.Vector2(window.innerWidth, window.innerHeight) },
iChannel0: { value: null },
},
vertexShader: vertexShader,
fragmentShader: screenPassFragmentShader
});
const geometry = new THREE.PlaneGeometry(2, 2);
const quad = new THREE.Mesh(geometry, slitScanMaterial);
scene.add(quad);
createSliders();
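// getUserMedia is only available in secure contexts (https:// or localhost);
// elsewhere navigator.mediaDevices is undefined and the else branch runs.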
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
const constraints = { video: { width: 1280, height: 720 } };
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
statusMessage.textContent = 'Camera connected.';
video.srcObject = stream;
// muted and playsInline make autoplay reliable, especially on iOS.
video.muted = true;
video.playsInline = true;
video.play();
videoTexture = new THREE.VideoTexture(video);
isCameraReady = true;
}).catch(function(error) {
statusMessage.textContent = 'Camera access failed: ' + error.name;
console.error("Camera access denied or failed: ", error);
});
} else {
statusMessage.textContent = 'Your browser does not support webcam access.';
}
window.addEventListener('resize', onWindowResize, false);
}
function onWindowResize() {
const width = window.innerWidth;
const height = window.innerHeight;
renderer.setSize(width, height);
slitScanMaterial.uniforms.iResolution.value.x = width;
slitScanMaterial.uniforms.iResolution.value.y = height;
screenPassMaterial.uniforms.iResolution.value.x = width;
screenPassMaterial.uniforms.iResolution.value.y = height;
// Note: setSize() reallocates the target textures, so the accumulated
// slit-scan image resets whenever the window is resized.
readBuffer.setSize(width, height);
writeBuffer.setSize(width, height);
}
function animate() {
requestAnimationFrame(animate);
if (isCameraReady && videoTexture) {
    // THREE.VideoTexture uploads new video frames automatically when the
    // scene is rendered, so no manual needsUpdate is required.
slitScanMaterial.uniforms.iTime.value = performance.now() / 1000;
slitScanMaterial.uniforms.u_speed.value = uniforms.u_speed.value;
slitScanMaterial.uniforms.u_delay.value = uniforms.u_delay.value;
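// Pass 1: render the slit-scan feedback into the write buffer.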
renderer.setRenderTarget(writeBuffer);
renderer.clear();
slitScanMaterial.uniforms.iChannel0.value = readBuffer.texture;
slitScanMaterial.uniforms.iChannel1.value = videoTexture;
scene.children[0].material = slitScanMaterial;
renderer.render(scene, camera);
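// Pass 2: draw the finished write buffer to the screen.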
renderer.setRenderTarget(null);
screenPassMaterial.uniforms.iChannel0.value = writeBuffer.texture;
scene.children[0].material = screenPassMaterial;
renderer.render(scene, camera);
// Swap the ping-pong buffers for the next frame.
const temp = readBuffer;
readBuffer = writeBuffer;
writeBuffer = temp;
}
}
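// Because slitpos is derived from absolute time, the sweep speed is
// frame-rate independent. At high u_speed or low frame rates the slit can
// advance by more than one pixel between frames, leaving unfilled gaps; a more
// robust shader would fill the whole span between the previous and current
// slit positions.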
window.onload = function() {
init();
animate();
};
</script>
</body>
</html>
<!--
SLITSCAN vs. GenAI
Portable slit-scanner demo: https://codepen.io/Tristan-Schulze/pen/OPyGNMg
-->