Please help me...

Please help us establish communication between our camera and our servo motor + Arduino. We need code so that we can send data, specifically a frame we captured with our camera, to the Arduino + servo motor for processing, so that it acts/moves according to the data sent to it. Thank you.

This isn’t going to go well…

benedick:
Please help us establish communication between our camera and our servo motor + Arduino. We need code so that we can send data, specifically a frame we captured with our camera, to the Arduino + servo motor for processing, so that it acts/moves according to the data sent to it. Thank you.

Do you understand exactly what you want the system to do?

Have you made any attempt to implement it yourself?

...and have you searched the forum for the advice normally given to people who want to hook cameras to Arduinos?

Yes, we know exactly what we want the system to do. Right now we can get video of our targets, the insects, and identify their positions/coordinates. Below is the code for this:

#include <iostream>
#include <fstream>

#include <string>
#include <vector>
#include <map>
#include <ctime>
#include <cstdio>

#include "cv.h"
#include "highgui.h"

using namespace std;

struct coordinate
{
	unsigned int x, y;
	void * data;
};

struct lineBlob
{
	unsigned int min, max;
	unsigned int blobId;

	bool attached;
};

struct blob
{
	//unsigned int blobId;
	coordinate min, max;

	coordinate center;
};


void detectBlobs(IplImage* frame, IplImage* finalFrame)
{

	int blobCounter = 0;
	map<unsigned int, blob> blobs;

    unsigned char threshold = 20;


    vector< vector<lineBlob> > imgData(frame->height); // one entry per image row

	for(int row = 0; row < frame->height; ++row)
	{

		for(int column = 0; column < frame->width; ++column)
		{

			// Read one grayscale pixel; use widthStep because IplImage rows may be padded
			unsigned char byte = (unsigned char) frame->imageData[(row * frame->widthStep) + column];

			if(byte <= threshold)
			{
				int start = column;

				// Walk to the end of this dark run, staying inside the current row
				while(column < frame->width && byte <= threshold)
				{
					++column;
					if(column < frame->width)
						byte = (unsigned char) frame->imageData[(row * frame->widthStep) + column];
				}

				int stop = column - 1;
				lineBlob lineBlobData = {start, stop, blobCounter, false};

				imgData[row].push_back(lineBlobData);
				blobCounter++;
			}
		}
	}

	/* Check lineBlobs for a touching lineblob on the next row */
	
	for(int row = 0; row + 1 < (int) imgData.size(); ++row) // compare each row with the one below it
	{

		for(int entryLine1 = 0; entryLine1 < imgData[row].size(); ++entryLine1)
		{

			for(int entryLine2 = 0; entryLine2 < imgData[row+1].size(); ++entryLine2)
			{

				if(!((imgData[row][entryLine1].max < imgData[row+1][entryLine2].min) || (imgData[row][entryLine1].min > imgData[row+1][entryLine2].max)))
				{

					if(imgData[row+1][entryLine2].attached == false)
					{

						imgData[row+1][entryLine2].blobId = imgData[row][entryLine1].blobId;

						imgData[row+1][entryLine2].attached = true;
					}
					else

					{
						imgData[row][entryLine1].blobId = imgData[row+1][entryLine2].blobId;

						imgData[row][entryLine1].attached = true;
					}
				}
			}
		}
	}

	// Sort and group blobs

	for(int row = 0; row < imgData.size(); ++row)
	{

		for(int entry = 0; entry < imgData[row].size(); ++entry)
		{

			if(blobs.find(imgData[row][entry].blobId) == blobs.end()) // Blob does not exist yet

			{
				blob blobData = {{imgData[row][entry].min, row}, {imgData[row][entry].max, row}, {0,0}};

				blobs[imgData[row][entry].blobId] = blobData;
			}
			else

			{
				if(imgData[row][entry].min < blobs[imgData[row][entry].blobId].min.x)

					blobs[imgData[row][entry].blobId].min.x = imgData[row][entry].min;

				else if(imgData[row][entry].max > blobs[imgData[row][entry].blobId].max.x)

					blobs[imgData[row][entry].blobId].max.x = imgData[row][entry].max;

				if(row < blobs[imgData[row][entry].blobId].min.y)

					blobs[imgData[row][entry].blobId].min.y = row;

				else if(row > blobs[imgData[row][entry].blobId].max.y)

					blobs[imgData[row][entry].blobId].max.y = row;
			}
		}
	}

	// Calculate center
	for(map<unsigned int, blob>::iterator i = blobs.begin(); i != blobs.end(); ++i)
	{
		(*i).second.center.x = (*i).second.min.x + ((*i).second.max.x - (*i).second.min.x) / 2;
		(*i).second.center.y = (*i).second.min.y + ((*i).second.max.y - (*i).second.min.y) / 2;

		int size = ((*i).second.max.x - (*i).second.min.x) * ((*i).second.max.y - (*i).second.min.y);

		// Print coordinates on image, if it is large enough
		//if(size >= 0)
		//{
			CvFont font;
			cvInitFont(&font, CV_FONT_HERSHEY_PLAIN, 1.0, 1.0, 0, 1, CV_AA);

			char textBuffer[128];

			// Draw crosshair and print coordinates (just for debugging, not necessary for later multi-touch use)
			cvLine(finalFrame, cvPoint((*i).second.center.x - 5, (*i).second.center.y), cvPoint((*i).second.center.x + 5, (*i).second.center.y), cvScalar(0, 0, 153), 1);

			cvLine(finalFrame, cvPoint((*i).second.center.x, (*i).second.center.y - 5), cvPoint((*i).second.center.x, (*i).second.center.y + 5), cvScalar(0, 0, 153), 1);

			sprintf_s(textBuffer, "(%d, %d)", (*i).second.center.x, (*i).second.center.y);

			cvPutText(finalFrame, textBuffer, cvPoint((*i).second.center.x + 5, (*i).second.center.y - 5), &font, cvScalar(0, 0, 153));

			cvRectangle(finalFrame, cvPoint((*i).second.min.x, (*i).second.min.y), cvPoint((*i).second.max.x, (*i).second.max.y), cvScalar(0, 0, 153), 1);
			
			// Show center point
			//cout << "(" << (*i).second.center.x << ", " << (*i).second.center.y << ")" << endl;
		//}
	}

//return 0;
}

int main()
{
	CvCapture * capture = cvCaptureFromCAM(CV_CAP_ANY);

	if(!capture)
	{
		fprintf( stderr, "ERROR: capture is NULL \n" );

		getchar();
		return -1;
	}

	// Create a window in which the captured images will be presented
	//cvNamedWindow( "Capture", CV_WINDOW_AUTOSIZE );
	cvNamedWindow("Capture", 0);

	cvNamedWindow("Result", 0);
	cvSetCaptureProperty( capture, CV_CAP_PROP_FRAME_WIDTH, 320 );
	cvSetCaptureProperty( capture, CV_CAP_PROP_FRAME_HEIGHT, 240 );
	while(1)
	{	
		// Get one frame from the web cam

		IplImage* frame = cvQueryFrame(capture);
		if(!frame)
		{

			fprintf( stderr, "ERROR: frame is null...\n" );
			getchar();
			break;
		}

		IplImage* gsFrame;
		IplImage* finalFrame;
		gsFrame = cvCreateImage(cvSize(frame->width,frame->height), IPL_DEPTH_8U, 1);

		finalFrame = cvCloneImage(frame);
		
		// Convert image to grayscale
		cvCvtColor(frame, gsFrame, CV_BGR2GRAY);

		
		// Blur the images to reduce the false positives
		//cvSmooth(gsFrame, gsFrame, CV_BLUR);
		//cvSmooth(finalFrame, finalFrame, CV_BLUR);


		// Detection (with timer for debugging purposes)
		clock_t start = clock();
		detectBlobs(gsFrame, finalFrame);

		clock_t end = clock();
		cout << end-start << endl;

		// Show images in a nice window
		cvShowImage( "Capture", frame );
		cvShowImage( "Result", finalFrame );

		// Do not release 'frame' itself (it is owned by the capture); only release the images we created

		cvReleaseImage(&gsFrame);
		cvReleaseImage(&finalFrame);

		//If ESC key pressed, Key=0x10001B under OpenCV 0.9.7(linux version),
		//remove higher bits using AND operator

		if( (cvWaitKey(10) & 255) == 27 ) break;
	}

	// Release the capture device housekeeping
	cvReleaseCapture( &capture );
	cvDestroyWindow( "Capture" );

	cvDestroyWindow( "Result" );
	return 0;
}
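
For illustration, a possible helper for the step we still need, just a sketch (the name packTarget is made up here, and it assumes the 320x240 capture size set in main): the blob center has to be scaled down to one byte per axis before it can go over serial.

#include <utility>

// Scale a pixel coordinate from the capture resolution (320x240 as set in main)
// down to one byte per axis, so a target position fits in two serial bytes.
std::pair<unsigned char, unsigned char> packTarget(unsigned int x, unsigned int y,
                                                   unsigned int width = 320,
                                                   unsigned int height = 240)
{
	unsigned char bx = (unsigned char)(x * 255 / (width - 1));
	unsigned char by = (unsigned char)(y * 255 / (height - 1));
	return std::make_pair(bx, by);
}

The part we are stuck on is what to do with those two bytes next, i.e. actually opening the serial port to the Arduino from this program.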

And we want the coordinates we obtained to be sent to the Arduino-controlled servo motor so that it can shoot a laser beam at the target. Below is the code we made to interface with the motor:

#include "ofMain.h"
#include "servoApp.h"
#include <Servo.h> // Arduino-only header; openFrameworks can't read this, hehehe

Servo vert;
Servo horz;
byte xy[2]; //this array is what the Serial data will be written to


int main() {
	ofSetupOpenGL( 1024,768, OF_WINDOW );
	ofRunApp( new servoApp() );
}

void setup() {
	horz.attach(3);
	vert.attach(5);

	Serial.begin(9600); // start the serial communication between openFrameworks (the PC) and the Arduino


}

void loop() {

	// read the xy bytes from the serial port and drive the servos

	if(Serial.available()>1){
		xy[0]=Serial.read(); // first byte sent by the PC: horizontal position, 0..255
		xy[1]=Serial.read(); // second byte sent by the PC: vertical position, 0..255

		float ratio = 180.0/255.0; // map 0..255 back to 0..180 degrees
		horz.write(xy[0]*ratio);
		vert.write(xy[1]*ratio);
		delay(25); // settling delay; the value still needs to be tuned
	}
}

void servoApp::setup(){
	ofSetFrameRate(60);
	threshold = 60;
	bLearnBakground = true;

	serial.enumerateDevices();
	serial.setup("COM1", 9600); // the serial port (and baud rate) we write the data to

	vidGrabber.setDeviceID(5); // may not be needed if only one camera is in use
	vidGrabber.initGrabber(320,240); // capture width and height in pixels
	colorImg.allocate(320,240);
	grayImg.allocate(320,240);
	bgImg.allocate(320,240);
}

void servoApp::update(){ // update: grab a new frame and process it
	vidGrabber.grabFrame();
	int i, len, largest;

	if(vidGrabber.isFrameNew()){
		colorImg = vidGrabber.getPixels();
		colorImg.mirror(false,true);
		grayImg=colorImg;
		if(bLearnBakground){
			bgImg = grayImg;
			bLearnBakground = false;
		}

		grayImg.absDiff(bgImg);
		grayImg.blur(11);
		grayImg.threshold(threshold);

		//do we need contourFinder?

		contourFinder.findContours(grayImg, 50, 20000, 10, false); // look for blobs: min area 50, max area 20000, up to 10 blobs, no holes
		largest = -1;
		i = 0;

		//do we need to process the contour if we're going to use contourFinder?
		//and do we need figure out or get the largest contour?

		if(contourFinder.blobs.size()>0){
			len = contourFinder.blobs.size();
			while(i<len){
				if(largest==-1){
					largest = i;
				}
				else if(contourFinder.blobs.at(i).area>contourFinder.blobs.at(largest).area){
					largest = i;
				}
				i++;
			}
			
			if(ofGetFrameNum() % 4 == 0){ //writing data to the serial once every 4 frames, or about 15 frames/second
				if(largest!=-1){
					serial.writeByte(contourFinder.blobs.at(largest).centroid.x/320*255); // x scaled to one byte; read into xy[0] (horizontal) on the Arduino
					serial.writeByte(contourFinder.blobs.at(largest).centroid.y/240*255); // y scaled to one byte; read into xy[1] (vertical)
				}
			}
		}
	}
}

Hope this helps explain where we are.

Right now we can get video of our targets, the insects, and identify their positions/coordinates. And we want the coordinates we obtained to be sent to the Arduino-controlled servo motor so that it can shoot a laser beam at the target.

Insect coordinates and laser beams?! Why do you want to fry the insects?

What kind of servo are you using? I assume two servos to adjust the position?

@pauly, yup, as in coordinates and laser beams, but right now we're using dummy targets. We can already identify the target as well as its position. Actually, we are targeting mosquitoes only, because we have a dengue epidemic in our country. We already have code to control the Arduino + laser beam and it's working. The only problem we are encountering is how to establish a serial connection between our camera + openFrameworks program and our Arduino IDE + motor/laser system. Hope you can help us.

@winner10920, I'm sorry, my mistake: we are actually using two STEPPER MOTORS.

If it's just a serial connection, why doesn't it work as normal? How is the data being sent over serial to the Arduino?

I suggest you step back and try to describe what your system is intended to do in terms of the major components (PC, Arduino, camera, laser, laser steering mechanism?), how they are connected and how the overall system works. You have posted code that seems to be doing video processing and something that references servos, but obviously none of that code is going to run on an Arduino.

I'm envisaging a webcam with image processing on a PC, sending steering and firing commands to an Arduino to point and fire a laser (something like the rough sketch at the end of this post). From the Arduino point of view that seems feasible. I imagine there are plenty of paintball turrets working on the same approach.

But is it realistic as a mosquito killer? I don't see it, personally.

I'm thinking that given the size/speed of a mosquito, and the processing delays and mechanical latency in an aiming solution, and the relatively poor resolution of a video cam, the chances of hitting an individual mosquito are negligible. If you just fire the thing continuously and wave it in the general direction of a cloud of insects you might hit one or two by blind chance, but is a brief flash of laser going to kill an insect? If it is, it would be extremely dangerous for any human in the area. And have you figured out how many insects you'd need to kill to make a dent in your plague?
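
To be concrete about what I mean by steering and firing commands, here is a purely illustrative Arduino-side sketch. The command format ('S<pan>,<tilt>' to steer, 'F' to fire) and the pin numbers are made up for this example, not taken from the code posted above.

#include <Servo.h>

Servo pan;
Servo tilt;
const int laserPin = 8; // assumed pin driving the laser (through a suitable driver)

void setup() {
  Serial.begin(9600);
  pan.attach(6);
  tilt.attach(7);
  pinMode(laserPin, OUTPUT);
}

void loop() {
  if (Serial.available()) {
    char cmd = Serial.read();

    if (cmd == 'S') {
      // Steer: read two angles separated by a comma, e.g. "S90,45"
      int panAngle  = Serial.parseInt();
      int tiltAngle = Serial.parseInt();
      pan.write(constrain(panAngle, 0, 180));
      tilt.write(constrain(tiltAngle, 0, 180));
    }
    else if (cmd == 'F') {
      // Fire: pulse the laser briefly
      digitalWrite(laserPin, HIGH);
      delay(100);
      digitalWrite(laserPin, LOW);
    }
  }
}

Whether the optics and timing would make that useful against a mosquito is another question entirely.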

A serial protocol is probably the easiest way to send data between an Arduino and another gizmo. Below is simple two-servo control code for the Arduino. You need to somehow transform your x/y coordinate info into appropriate servo positions and send it to the Arduino via the Arduino serial port (a rough PC-side sketch for doing that follows the Arduino code).

// zoomkat 12-13-11 serial servo (2) test
// for writeMicroseconds, use a value like 1500
// for IDE 1.0
// Powering a servo from the arduino usually DOES NOT WORK.
// two servo setup with two servo commands
// send eight character string like 15001500 or 14501550

#include <Servo.h> 
String readString, servo1, servo2;
Servo myservo1;  // create servo object to control a servo 
Servo myservo2;

void setup() {
  Serial.begin(9600);
  myservo1.attach(6);  //the pin for the servo control 
  myservo2.attach(7);
  Serial.println("two-servo-test-1.0"); // so I can keep track of what is loaded
}

void loop() {

  while (Serial.available()) {
    delay(3);  //delay to allow buffer to fill 
    if (Serial.available() >0) {
      char c = Serial.read();  //gets one byte from serial buffer
      readString += c; //makes the string readString
    } 
  }

  if (readString.length() >0) {
      Serial.println(readString); //see what was received
      
      // expect a string like 07002100 containing the two servo positions      
      servo1 = readString.substring(0, 4); //get the first four characters
      servo2 = readString.substring(4, 8); //get the next four characters 
      
      Serial.println(servo1);  //print to serial monitor to see results
      Serial.println(servo2);

      int n1 = servo1.toInt();
      int n2 = servo2.toInt();
      
      myservo1.writeMicroseconds(n1); //set servo position 
      myservo2.writeMicroseconds(n2);
    readString="";
  } 
}
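
And for the PC end of that: a rough sketch of opening the port and sending the eight-character string. Windows is assumed here (the OpenCV code earlier uses sprintf_s, so MSVC), the COM port name is whatever your Arduino shows up as, the openSerial/sendTarget helpers are just illustrative names, and the pixel-to-microseconds mapping is only a starting point you will need to calibrate.

#include <windows.h>
#include <cstdio>

// Open a COM port at 9600 8N1. The port name ("\\\\.\\COM3" for example) is an
// assumption; use whatever port your Arduino enumerates as.
HANDLE openSerial(const char* portName)
{
    HANDLE h = CreateFileA(portName, GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, NULL);
    if (h == INVALID_HANDLE_VALUE)
        return h;

    DCB dcb = {0};
    dcb.DCBlength = sizeof(dcb);
    GetCommState(h, &dcb);
    dcb.BaudRate = CBR_9600;
    dcb.ByteSize = 8;
    dcb.Parity   = NOPARITY;
    dcb.StopBits = ONESTOPBIT;
    SetCommState(h, &dcb);
    return h;
}

// Map a pixel coordinate (0..width-1, 0..height-1) onto two pulse widths in the
// 1000..2000 microsecond range and send them as the eight-character string
// (for example "15001500") that the sketch above expects.
void sendTarget(HANDLE h, int x, int y, int width, int height)
{
    int pan  = 1000 + (x * 1000) / (width  - 1);
    int tilt = 1000 + (y * 1000) / (height - 1);

    char buffer[16];
    sprintf_s(buffer, "%04d%04d", pan, tilt);

    DWORD written = 0;
    WriteFile(h, buffer, 8, &written, NULL);
}

Call openSerial("\\\\.\\COM3") once at startup, then call sendTarget() with each blob center from the OpenCV program (width 320, height 240 as set there). Remember that most Arduinos reset when the port is opened, so give it a second or two before sending.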