I've made a stepper motor gear drive for my telescope designed to track the star's movement across the sky for astrophotography.
I nailed a 30 second exposure with no star trails, but other shots have various levels of trailing, so I know something isn't consistent. Certainly this might be a mechanical issue, my telescope mount is cheap and I'm pretty sure it needs various levels of torque to consistently drive it forward evenly, but I also want to rule out a programming issue.
My function raStandard() is the primary method the motor is stepped at a specific period:
#include <Encoder.h> // Paul Stoffregen's quadrature rotary-encoder library (third party)
Encoder rapidAdv(11, 10); // rotary encoder A/B channel inputs on pins 11 and 10
int rotaryButt = 12; // encoder's integrated push-button pin (read with internal pull-up: LOW = pressed)
int advButt = 9; // push-button pin: hold to rapidly advance the motor a short distance
int stepSize = 4; // microstep-select pin on the stepstick driver: HIGH = half step, LOW = full step — presumably MS1 or similar; confirm against wiring
int dir = 3; // driver DIR pin: selects motor rotation direction
int stp = 2; // driver STEP pin: one HIGH pulse = one (micro)step
unsigned long lastAdvance = 0; // millis() timestamp of the most recent tracking step
long orbitSpeed = 300; // tracking period in MILLISECONDS between steps; tuned at runtime via the rotary encoder
long lastRotaryPos = -999; // encoder count at the last handled speed adjustment (sentinel until setup() reads the real value)
long currentPos = -999; // encoder count read on the current loop() pass
int rotButtState = LOW; // latest reading of the encoder's button (LOW = pressed, due to pull-up)
long adjRotaryDiff = 0; // encoder movement (lastRotaryPos - currentPos) since the last handled adjustment
int advButtState = LOW; // latest reading of the rapid-advance button (HIGH = pressed)
// One-time hardware initialization: configure the driver and button pins,
// seed the encoder baseline, default to half-step mode, and open serial
// for speed monitoring.
void setup()
{
  // Outputs driving the stepstick board.
  pinMode(dir, OUTPUT);
  pinMode(stp, OUTPUT);
  pinMode(stepSize, OUTPUT);

  // Button inputs. The encoder's button uses the internal pull-up,
  // so its pin reads LOW while pressed.
  pinMode(advButt, INPUT);
  pinMode(rotaryButt, INPUT_PULLUP);

  digitalWrite(stepSize, HIGH);     // half-step microstepping is the default mode
  lastRotaryPos = rapidAdv.read();  // baseline encoder count for later diffs

  Serial.begin(9600);
}
// Main control loop: issue tracking steps on schedule, then poll the
// rapid-advance button and the rotary encoder to adjust the step period.
void loop()
{
  raStandard();

  // NOTE(review): at 9600 baud, each println can block several ms once the
  // 64-byte TX buffer fills (~1 ms per character). Printing every pass is a
  // real source of step-timing jitter — print on change or raise the baud
  // rate while hunting tracking inconsistencies.
  Serial.println(orbitSpeed);

  currentPos   = rapidAdv.read();           // poll all inputs once per pass
  rotButtState = digitalRead(rotaryButt);   // LOW = pressed (pull-up)
  advButtState = digitalRead(advButt);
  adjRotaryDiff = (lastRotaryPos - currentPos); // encoder travel since last handled

  if (advButtState == HIGH) // rapid-advance: 21 full steps while held
  {
    // WARNING: this blocks for roughly 0.9 s (21 * (35 ms + step pulse)),
    // during which raStandard() cannot run — tracking pauses entirely.
    digitalWrite(stepSize, LOW);  // full steps for faster travel
    for (int t = 0; t <= 20; t++) {
      Prograde();
      delay(35);
    }
    digitalWrite(stepSize, HIGH); // resume half steps for normal tracking
  }

  // Fine adjustment: encoder turned while its button is held (1 detent = 1 ms).
  if ((rotButtState == LOW) && (adjRotaryDiff <= -1))
  {
    orbitSpeed++;
    lastRotaryPos = rapidAdv.read();
  }
  if ((rotButtState == LOW) && (adjRotaryDiff >= +1))
  {
    orbitSpeed--;
    lastRotaryPos = rapidAdv.read();
  }

  // Coarse adjustment: encoder turned with the button released (4 detents = 1 ms).
  if ((adjRotaryDiff >= 4) && rotButtState == HIGH)
  {
    orbitSpeed--;
    lastRotaryPos = rapidAdv.read();
  }
  if ((adjRotaryDiff <= -4) && rotButtState == HIGH)
  {
    // BUG FIX: this branch previously decremented orbitSpeed, identical to
    // the opposite-direction branch above, so un-pressed rotation could only
    // ever shorten the period. Mirroring the button-held cases (negative
    // diff => increment) restores two-directional coarse adjustment.
    orbitSpeed++;
    lastRotaryPos = rapidAdv.read();
  }
}
// Default tracking mode: issues one motor step every 'orbitSpeed'
// milliseconds (non-blocking millis() scheduler; safe across rollover
// because the comparison uses unsigned subtraction).
void raStandard()
{
  unsigned long currentTime = millis();
  if ((currentTime - lastAdvance) >= (unsigned long)orbitSpeed)
  {
    Prograde();
    // BUG FIX: previously this read millis() AFTER Prograde() returned, so
    // the 10 ms step-pulse delay inside Prograde() (plus any loop overhead)
    // was silently added to every period — the motor actually stepped about
    // every orbitSpeed + 10 ms, a systematic tracking-rate error. Stamping
    // with the time captured BEFORE stepping keeps the period at orbitSpeed.
    // (For exact long-term average rate even after blocking events, consider
    // 'lastAdvance += orbitSpeed;' instead — but that makes missed steps
    // fire back-to-back as catch-up.)
    lastAdvance = currentTime;
  }
}
void Prograde() //rotates motor one step in direction of earth's orbit
{
digitalWrite(dir, HIGH); // fixed direction: always steps "forward" (prograde)
digitalWrite(stp, HIGH); // rising edge on STEP advances the motor one (micro)step
// NOTE(review): a 10 ms HIGH pulse is ~4 orders of magnitude longer than
// typical stepstick drivers require (microseconds), and this blocking delay
// adds 10 ms of dead time to every step. If orbitSpeed was calibrated with
// this delay in place, shortening it will change the tracking rate — verify
// against your driver's minimum STEP pulse width before changing.
delay(10);
digitalWrite(stp, LOW);
}
Nothing seems like it would delay normal operation, so it should advance the motor every 300 milliseconds (or whatever orbitSpeed has been set to) if no other inputs are being pressed. I just want a way to check exactly when the motor advances, and to be sure that the act of observing it won't change the timing itself.
If I print 'lastAdvance' over serial, is there any reason it would be inaccurate? How much CPU time does a standard serial print consume?