First of all, I am sorry: this is actually Processing code that sends a coordinate to an Arduino system. I decided to ask in this forum because I already asked on the Processing forum and on Stack Overflow and did not find an answer. The first code below (a two-dimensional array in the for loop) does color tracking on the left side (0-100 on the x axis) and the bottom part (240-480 on the y axis) of the image; the serial-sending part is not included in it, but a rough sketch of that follows after this first code.
import processing.video.*;

Capture video;
float threshold = 210;
color trackColor;
PVector target;

void setup() {
  size(640, 480);
  video = new Capture(this, width, height);
  video.start();
  trackColor = color(160, 0, 0); // Start off tracking for red
}

void captureEvent(Capture video) {
  // Read image from the camera
  video.read();
}

void draw() {
  loadPixels();
  video.loadPixels();
  image(video, 0, 0);

  float avgX = 0;
  float avgY = 0;
  int count = 0;

  for (int x = 0; x < video.width && x < 100; x++) {
    for (int y = 240; y < video.height; y++) {
      int loc = x + y * video.width;
      color currentColor = video.pixels[loc];
      float r1 = red(currentColor);
      float g1 = green(currentColor);
      float b1 = blue(currentColor);
      float r2 = red(trackColor);
      float g2 = green(trackColor);
      float b2 = blue(trackColor);

      // Using euclidean distance to compare colors
      float d = distSq(r1, g1, b1, r2, g2, b2);

      if (d < threshold) {
        stroke(255);
        strokeWeight(1);
        point(x, y);
        avgX += x;
        avgY += y;
        count++;
      }
    }
  }

  if (count > 0) {
    avgX = avgX / count;
    avgY = avgY / count;
    // Draw a circle at the tracked pixel
    fill(trackColor);
    strokeWeight(4.0);
    stroke(0);
    ellipse(avgX, avgY, 20, 20);
    text("brightnesslevel: " + trackColor, 20, 60);
    text("FPS: " + frameRate, 20, 80);
  }

  target = new PVector(avgX, avgY);
}

float distSq(float x1, float y1, float z1, float x2, float y2, float z2) {
  float d = (x2-x1)*(x2-x1) + (y2-y1)*(y2-y1) + (z2-z1)*(z2-z1);
  return d;
}

void mousePressed() {
  // Save color where the mouse is clicked in trackColor variable
  int loc = mouseX + mouseY * video.width;
  trackColor = video.pixels[loc];
}
The following code does the same color tracking, but with a one-dimensional array in the for loop:
import processing.video.*;

PShader colorFinder, colorPosShader;
PGraphics overlay, posBuffer;

// Variable for capture device
Capture video;

// A variable for the color we are searching for.
color trackColor;
float threshold = 0.1;

void setup() {
  //size(320, 240);
  size(640, 480, P2D);
  overlay = createGraphics(width, height, P2D);
  posBuffer = createGraphics(width, height, P2D);
  colorFinder = loadShader("colorDetect.glsl");
  colorPosShader = loadShader("colorPos.glsl");
  printArray(Capture.list());
  video = new Capture(this, width, height);
  video.start();
  video.loadPixels();
  // Start off tracking for red
  trackColor = color(255, 0, 0);
}

void captureEvent(Capture video) {
  // Read image from the camera
  video.read();
}

void draw() {
  colorFinder.set("threshold", threshold);
  colorFinder.set("targetColor", red(trackColor) / 255.0, green(trackColor) / 255.0, blue(trackColor) / 255.0, 1.0);
  colorPosShader.set("threshold", threshold);
  colorPosShader.set("targetColor", red(trackColor) / 255.0, green(trackColor) / 255.0, blue(trackColor) / 255.0, 1.0);

  overlay.beginDraw();
  overlay.shader(colorFinder);
  overlay.image(video, 0, 0);
  overlay.endDraw();

  posBuffer.beginDraw();
  posBuffer.shader(colorPosShader);
  posBuffer.image(video, 0, 0);
  posBuffer.endDraw();

  // compute average position by looking at pixels from position buffer
  posBuffer.loadPixels();
  PVector avg = new PVector(0, 0);
  int count = 0;
  for (int i = 0; i < posBuffer.pixels.length; i++) {
    // encoded so blue is > 0 if a pixel is within threshold
    if (blue(posBuffer.pixels[i]) > 0) {
      count++;
      // processing takes 0-1 (float) color values from the shader to 0-255 (int) values for color
      // to decode, we need to divide the color by 255 to get the original value
      avg.add(red(posBuffer.pixels[i]) / 255.0, green(posBuffer.pixels[i]) / 255.0);
    }
  }

  if (count > 0) {
    // we have the sum of positions, so divide by the number of additions
    avg.div((float) count);
    // convert 0-1 position to screen position
    avg.x *= width;
    avg.y *= height;
  } else {
    // appear offscreen
    avg = new PVector(-100, -100);
  }

  image(overlay, 0, 0);
  fill(trackColor);
  stroke(0);
  circle(avg.x, avg.y, 16);

  fill(0, 50);
  noStroke();
  rect(0, 0, 150, 30);
  fill(150);
  text("Framerate: " + frameRate, 0, 11);
  text("Threshold: " + threshold, 0, 22);
}

void mousePressed() {
  // Save color where the mouse is clicked in trackColor variable
  video.loadPixels();
  int loc = mouseX + mouseY * video.width;
  trackColor = video.pixels[loc];
}

void mouseWheel(MouseEvent e) {
  threshold -= e.getCount() * 0.01;
  threshold = constrain(threshold, 0, 1);
}
In the second code, how can I limit the tracking area (the range on the x axis and the y axis) when using a one-dimensional array? Thank you.
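One idea I have sketched (not tested; it only reuses the variables from the second code above) is to convert the one-dimensional index i back to x and y with the same loc = x + y*width relationship used in the first code, and then skip every pixel outside the region:

// replacement for the existing for loop over posBuffer.pixels in draw()
// assumption: posBuffer.pixels has the same top-left row order as video.pixels
for (int i = 0; i < posBuffer.pixels.length; i++) {
  int x = i % posBuffer.width;   // column of pixel i
  int y = i / posBuffer.width;   // row of pixel i
  // keep only the left side (x 0-99) and the bottom part (y 240-479)
  if (x >= 100 || y < 240) {
    continue;
  }
  // encoded so blue is > 0 if a pixel is within threshold
  if (blue(posBuffer.pixels[i]) > 0) {
    count++;
    avg.add(red(posBuffer.pixels[i]) / 255.0, green(posBuffer.pixels[i]) / 255.0);
  }
}

Is this the right way to limit the area in a one-dimensional loop, or does it have to be done inside the GLSL shaders instead?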

