// Face-capture sketch: detects a face in the webcam feed and stores cropped face shots.

// Import necessary libraries
import gab.opencv.*;
import processing.video.*;
import java.awt.*;
import java.util.ArrayList;
// Webcam feed (Processing video library).
Capture video;
// OpenCV wrapper used for Haar-cascade face detection.
OpenCV opencv;
// Variables for face tracking
// NOTE(review): tx/ty/ts and sx/sy/ss are declared but never read or
// written anywhere in this file — presumably leftovers from a planned
// smoothed-tracking feature; confirm before removing.
float tx, ty, ts; // Target x, y, and size
float sx, sy, ss; // Smoothed x, y, and size
// ArrayList to store face shots
ArrayList<PImage> faceShots = new ArrayList<PImage>();
// Timestamp (millis) of the most recent face-shot capture; draw() uses
// it to rate-limit captures to one every 200 ms.
int lastCaptureTime = 0;
/**
 * Processing entry point: configures the canvas, the camera feed,
 * and the OpenCV face detector, then starts capturing.
 */
void setup() {
  size(800, 600);

  // Camera and detector must share the same 640x480 resolution so
  // detection rectangles map directly onto the video frame.
  video = new Capture(this, 640, 480);
  opencv = new OpenCV(this, 640, 480);

  // Standard frontal-face Haar cascade shipped with the library.
  opencv.loadCascade(OpenCV.CASCADE_FRONTALFACE);

  video.start();
}
// Maximum number of retained face shots. The original code grew the list
// without bound (a memory leak over long runs); 25 also exactly fills the
// 5-column x 5-row thumbnail grid on the 800x600 canvas.
final int MAX_FACE_SHOTS = 25;

/**
 * Per-frame loop: runs face detection on the current camera frame,
 * captures a cropped face shot at most once every 200 ms, and draws
 * the collected shots as a thumbnail grid.
 */
void draw() {
  // Feed the latest camera frame to OpenCV for detection.
  opencv.loadImage(video);

  // Uncomment to show the raw camera feed behind the thumbnails.
  //image(video, 0, 0);

  Rectangle[] faces = opencv.detect();

  // Rate-limit captures: at most one face shot every 200 ms.
  if (faces.length > 0 && millis() - lastCaptureTime >= 200) {
    // Clamp the detection rectangle to the video bounds — cascade
    // results can occasionally poke past the frame edge, which would
    // corrupt the PImage.copy below.
    int x = max(0, faces[0].x);
    int y = max(0, faces[0].y);
    int w = min(faces[0].width, video.width - x);
    int h = min(faces[0].height, video.height - y);

    if (w > 0 && h > 0) {
      PImage faceShot = createImage(w, h, RGB);
      faceShot.copy(video, x, y, w, h, 0, 0, w, h);
      faceShots.add(faceShot);

      // Drop the oldest shots so memory use stays bounded.
      while (faceShots.size() > MAX_FACE_SHOTS) {
        faceShots.remove(0);
      }

      lastCaptureTime = millis();
      println("Captured face shot!");
    }
  }

  // Display the captured face shots as a 5-column thumbnail grid
  // (newest shots appear last).
  for (int i = 0; i < faceShots.size(); i++) {
    image(faceShots.get(i), (i % 5) * 160, (i / 5) * 120, 160, 120);
  }
}
// Event handler for video capture
void captureEvent(Capture c) {
c.read(); // Read the new frame from the capture
}