Hi all,
I've been building projects with Arduino for years now and have had a lot of fun. Now I'm trying to get my feet wet with some Python. My goal for this project is to use OpenCV's face detection to track a face in a video and send its location to an Arduino to control a servo.

The face detection in Python works great, and I can calculate the center point of the face and store it in centerX and centerY variables. My simple Arduino sketch takes any value from 0 to 400 (the centerX range) and maps it to 0 to 180 to drive the servo. After some research I found that you can send serial data from Python running on a laptop to an Arduino with PySerial. After figuring out the usage I implemented it, and I can see that the Arduino is receiving the data because its RX LED blinks. The only problem is that it doesn't seem to want to control the servo.

To check what is actually coming through, I hooked the RX and TX lines of the Arduino up to a second Arduino and watched its serial monitor. All that shows up is random ASCII characters. I know this looks like a baud rate issue, but I double-checked and every Arduino is set to 9600 and the Python program is too. From what I've seen nobody else has had this issue, and I'm completely confused about why it's happening.
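(To make the mapping concrete: the conversion I want on the Arduino side is the equivalent of map(centerX, 0, 400, 0, 180). In Python terms it would be roughly the sketch below; center_to_angle is just an illustrative name, not something in my actual code.)

# rough Python equivalent of the Arduino map(centerX, 0, 400, 0, 180) call,
# just to spell out the conversion I have in mind
def center_to_angle(center_x):
    return int(center_x * 180 / 400)  # e.g. 0 -> 0, 200 -> 90, 400 -> 180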
(Excuse the code neatness; I haven't cleaned it up yet.)
# USAGE
# python detect_faces_video.py --prototxt deploy.prototxt.txt --model res10_300x300_ssd_iter_140000.caffemodel
# import the necessary packages
from imutils.video import VideoStream
import numpy as np
import argparse
import imutils
import time
import cv2
import serial
# Serial setup
ser = serial.Serial()
ser.baudrate = 9600
ser.port = '/dev/ttyUSB0'
ser.parity = serial.PARITY_ODD
ser.stopbits = serial.STOPBITS_ONE
ser.bytesize = serial.EIGHTBITS
ser.open()
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--prototxt", required=True,
    help="path to Caffe 'deploy' prototxt file")
ap.add_argument("-m", "--model", required=True,
    help="path to Caffe pre-trained model")
ap.add_argument("-c", "--confidence", type=float, default=0.5,
    help="minimum probability to filter weak detections")
args = vars(ap.parse_args())
# load our serialized model from disk
print("[INFO] loading model...")
net = cv2.dnn.readNetFromCaffe(args["prototxt"], args["model"])
# initialize the video stream and allow the camera sensor to warm up
print("[INFO] starting video stream...")
vs = VideoStream(src=0).start()
time.sleep(0.5)
# loop over the frames from the video stream
while True:
    # grab the frame from the threaded video stream and resize it
    # to have a maximum width of 400 pixels
    frame = vs.read()
    frame = imutils.resize(frame, width=400)

    # grab the frame dimensions and convert it to a blob
    (h, w) = frame.shape[:2]
    blob = cv2.dnn.blobFromImage(cv2.resize(frame, (300, 300)), 1.0,
        (300, 300), (104.0, 177.0, 123.0))

    # pass the blob through the network and obtain the detections and
    # predictions
    net.setInput(blob)
    detections = net.forward()

    # loop over the detections
    for i in range(0, detections.shape[2]):
        # extract the confidence (i.e., probability) associated with the
        # prediction
        confidence = detections[0, 0, i, 2]

        # filter out weak detections by ensuring the `confidence` is
        # greater than the minimum confidence
        if confidence < args["confidence"]:
            continue

        # compute the (x, y)-coordinates of the bounding box for the
        # object
        box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
        (startX, startY, endX, endY) = box.astype("int")

        # draw the bounding box of the face along with the associated
        # probability
        centerX = ((startX + endX) / 2)
        centerY = ((startY + endY) / 2)
        text2 = "{}".format(int(centerX))
        text = "{:.2f}%".format(confidence * 100)
        y = startY - 10 if startY - 10 > 10 else startY + 10
        cv2.rectangle(frame, (startX, startY), (endX, endY),
            (0, 0, 255), 2)
        cv2.putText(frame, text2, (startX, y - 10),
            cv2.FONT_HERSHEY_SIMPLEX, 0.45, (0, 0, 255), 2)
        cv2.putText(frame, text, (startX, y + 5),
            cv2.FONT_HERSHEY_SIMPLEX, 0.45, (0, 0, 255), 2)
        cv2.line(frame, (startX, startY), (endX, endY), (0, 0, 255), 2)
        cv2.line(frame, (startX, endY), (endX, startY), (0, 0, 255), 2)
        cv2.circle(frame, (int(centerX), int(centerY)), 8, (0, 255, 0), 2)

        # Serial transmission
        ser.write(centerX)

    # show the output frame
    cv2.imshow("Frame", frame)
    key = cv2.waitKey(1) & 0xFF

    # if the `q` key was pressed, break from the loop
    if key == ord("q"):
        break

# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()
And here's the Arduino sketch:

#include <Servo.h>

Servo myservo;
int pos = 0;

void setup()
{
  Serial.begin(9600);
  myservo.attach(9);
  myservo.write(100);
}

void loop() {
  if (Serial.available()) {
    int state = Serial.parseInt();
    if (state > 0 && state <= 180) {
      int x = map(state, 0, 400, 0, 180);
      myservo.write(state);
    }
  }
}
Thanks,
Noah