Here's some Processing code I used at one point to get the Kinect to talk to the Arduino (most of it is adapted from the book *Making Things See*).
import processing.serial.*;
import SimpleOpenNI.*;
SimpleOpenNI kinect;
Serial arduinoport;
// Initializes the sketch window, the serial link to the Arduino, and the Kinect.
// Picks the first available serial port at 115200 baud and enables full
// skeleton tracking so draw() can read joint positions.
void setup() {
  size(1024, 480);
  textSize(20);

  // Query the system's serial port list once and reuse it.
  String[] ports = Serial.list();
  if (ports.length < 1) {
    println("No serial port seen. Quitting.");
    exit();
    // exit() does not stop setup() immediately in Processing; without this
    // return, ports[0] below would throw ArrayIndexOutOfBoundsException.
    return;
  }
  if (ports.length > 1) {
    print("There are multiple serial ports. Using the first one: ");
    println(ports[0]);
  }
  arduinoport = new Serial(this, ports[0], 115200);
  //delay(2000); // Wait for arduino to reboot

  kinect = new SimpleOpenNI(this);
  kinect.enableDepth();
  // Track full skeletons so joint positions are available in draw().
  kinect.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);
}
// Main loop: draws the depth image, echoes any Arduino serial output to the
// console, and for each tracked skeleton derives a drive command from the
// vector between the user's hands. Commands are written to the Arduino as
// "<r,x,y,rotation>" with each component clamped to [-1000, 1000];
// "<r,0,0,0>" is sent when no tracked user is visible so the robot stops.
void draw() {
kinect.update();
PImage depth = kinect.depthImage();
image(depth, 0, 0);
// Collect the IDs of every user the Kinect currently sees.
IntVector userList = new IntVector();
kinect.getUsers(userList);
// Drain and echo anything the Arduino sent back (debug output).
while (arduinoport.available() > 0) {
char inByte = (char)arduinoport.read();
print(inByte);
}
boolean stopRobot = true; // Only move robot if someone is there
for (int i=0; i < userList.size(); i++) {
int userId = userList.get(i);
if (kinect.isTrackingSkeleton(userId)) {
float x = 0;
float y = 0;
// NOTE(review): rotation is never assigned, so the command's rotation
// component is always 0.
float rotation = 0;
// Get the position of the torso
PVector torsopos = new PVector();
float torsoconfidence = kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_TORSO, torsopos);
if (torsoconfidence > 0.5) {
displayJoint(torsopos);
// NOTE(review): userPos is computed but never used — its only consumer,
// the println further down, is commented out (and userPos would be out
// of scope there anyway). Looks like dead code; confirm before removing.
float userPos = torsopos.x;
userPos = map(userPos, -600, 600, 1000, -1000);
userPos = constrain(userPos, -1000, 1000);
}
// Read both hand joints; only command motion when both are confidently tracked.
PVector rightpos = new PVector();
float rightconfidence = kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_RIGHT_HAND, rightpos);
PVector leftpos = new PVector();
float leftconfidence = kinect.getJointPositionSkeleton(userId, SimpleOpenNI.SKEL_LEFT_HAND, leftpos);
if (rightconfidence > 0.5 && leftconfidence > 0.5) {
displayJoint(rightpos);
displayJoint(leftpos);
// Vector from left hand to right hand: its x component maps to sideways
// motion, its z component (depth difference between hands) to forward/back.
PVector differenceVector = PVector.sub(rightpos, leftpos);
x = constrain(map(differenceVector.x, -600, 600, -1000, 1000), -1000, 1000);
y = constrain(map(differenceVector.z, -200, 200, -1000, 1000), -1000, 1000);
if (differenceVector.y > 0) { // If left hand is higher
x = -x;
y = -y;
}
//print(x); print("\t"); print (y); print("\t");
}
//println(userPos);
// Format the drive command with integer (rounded) components.
String arduinocmd = String.format("<r,%.0f,%.0f,%.0f>", x, y, rotation);//"<r,"+x+","+y+"," + rotation + ">";
// Draw a small indicator panel showing the commanded (x, y) as a dot.
fill(50);
rect(725, 175, 250, 250);
fill(255);
ellipse(map(x, -1000, 1000, -100, 100) + 850, map(y, -1000, 1000, -100, 100) + 300, 20, 20);
//println (arduinocmd);
arduinoport.write(arduinocmd);
stopRobot = false;
}
}
// No tracked skeleton wrote a command this frame: tell the robot to stop.
if (stopRobot) {
arduinoport.write("<r,0,0,0>");
}
}
// SimpleOpenNI callback: a new user has entered the scene. Start watching
// for the "Psi" calibration pose and show a status line on a cleared frame.
void onNewUser(int userId) {
  kinect.startPoseDetection("Psi", userId);
  // Status display: clear the frame and announce that pose detection began.
  background(0);
  fill(255);
  text("Start pose detection", 700, 30);
}
// SimpleOpenNI callback: calibration finished for a user. On success, begin
// skeleton tracking; on failure, go back to waiting for the "Psi" pose.
// Either way, report the outcome on a cleared frame.
void onEndCalibration(int userId, boolean successful) {
  background(0);
  fill(255);
  String status = successful ? " User Calibrated!" : " Failed to calibrate user";
  text(status, 700, 30);

  if (successful) {
    kinect.startTrackingSkeleton(userId);
  } else {
    kinect.startPoseDetection("Psi", userId);
  }
}
// SimpleOpenNI callback: the user struck the calibration pose. Hand off from
// pose detection to skeleton calibration, and show a status line.
void onStartPose(String pose, int userId) {
  // Stop looking for the pose and request calibration for this user.
  kinect.stopPoseDetection(userId);
  kinect.requestCalibrationSkeleton(userId, true);
  // Status display on a cleared frame.
  background(0);
  fill(255);
  text("Started pose for user " + userId, 700, 30);
}
// Draws a red dot at a joint's position, converting from the Kinect's
// real-world coordinates to screen (projective) coordinates first.
// joint: joint position in Kinect real-world coordinates.
void displayJoint(PVector joint) {
  PVector converted = new PVector();
  kinect.convertRealWorldToProjective(joint, converted);
  // Guard the style state we mutate (stroke/fill) so it doesn't leak into
  // the caller. The original used pushMatrix()/popMatrix(), which was a
  // no-op here: no matrix transforms happen between the push and the pop.
  pushStyle();
  noStroke();
  fill(255, 0, 0);
  ellipse(converted.x, converted.y, 20, 20);
  popStyle();
}
And here's the corresponding Arduino code to control a robot, using a library I wrote myself (just replace bot.RunMotors with whatever you want to move): arduino/circlebot/move_with_serial at master · AdamCDunlap/arduino · GitHub