Autonomous Car Using RPLIDAR A1

Hello! I'm trying to build an autonomous car using an RPLIDAR A1 and a Traxxas Slash 4x4. My idea for the logic is to have the sensor scan a 180-degree field in front of the car; whichever angle reads the farthest distance, the car turns its tires toward that direction. I'm having some trouble with the speed of the code. It isn't running fast enough, and I wanted to know whether it's possible to make the code more efficient. I asked ChatGPT and it said something about using DRAM. Any ideas?
Here’s my code:

#include <RPLidar.h>    // RoboPeak RPLIDAR driver
#include <Servo.h>

RPLidar lidar;
Servo motor;            // ESC for the drive motor
Servo steer;            // steering servo
#define RPLIDAR_MOTOR 3 // PWM pin controlling the lidar's spin motor

int i = 0;
int arraysize = 0;
int degreeArray[200];   // angle stored for each reading, indexed by integer angle
int distanceArray[200]; // distance (mm) stored for each reading
int maxValue = 0;       // largest distance found in the current pass
int index;              // array slot (angle) of that largest distance
int wantdistance = maxValue;

const int RPL = 200;        // number of array slots scanned per pass
const int speedmed = 1500;  // neutral ESC pulse width in microseconds
int speedremap;
int speedread;
int forwardSpeed;
int backwardSpeed;

void setup() {
  // bind the RPLIDAR driver to the Arduino hardware serial port
  lidar.begin(Serial);
  motor.attach(11);   // ESC signal for the drive motor on pin 11
  steer.attach(9);    // steering servo on pin 9
  Serial.begin(115200);

  // set pin modes
  pinMode(RPLIDAR_MOTOR, OUTPUT);
  motor.writeMicroseconds(speedmed + 120);  // start the ESC just above neutral
}

//------------
void loop() {
  // read the analog input on A0 and map it to an offset from the neutral pulse width
  speedread = analogRead(A0);
  speedremap = map(speedread, 0, 1023, 0, 500);
  forwardSpeed = speedmed + speedremap;
  backwardSpeed = speedmed - speedremap;

  motor.writeMicroseconds(forwardSpeed);
  if (IS_OK(lidar.waitPoint())) {
    float distance = lidar.getCurrentPoint().distance;
    float angle    = lidar.getCurrentPoint().angle;
    Serial.print("Distance:  ");
    Serial.print(distance);
    Serial.print("Angle:  ");
    Serial.print(angle);
    // store the reading in the slot for its (integer) angle, front 180 degrees only
    if (int(angle) < 180) {
      degreeArray[int(angle)] = angle;
      distanceArray[int(angle)] = distance;
    }
    // find the slot holding the largest distance seen in this pass
    maxValue = 0;   // reset so stale readings from earlier passes don't win
    for (int i = 0; i < RPL; i++) {
      if (distanceArray[i] > maxValue) {
        maxValue = distanceArray[i];
        index = i;
      }
    }
//    wantdistance = maxValue;
//    for (i = 0; i < 72; i++) {
//      if (wantdistance = distanceArray[i]) {
//        index = i;
//        break;
//      }
//    }
    steer.write(degreeArray[index]);
    delay(1000);
    steer.write(degreeArray[index]);
    delay(1000);
    steer.write(degreeArray[index]);
    delay(1000);
    steer.write(degreeArray[index]);
   
    // clear the arrays before the next pass of readings
    for (i = 0; i < RPL; i++) {
      distanceArray[i] = 0;
      degreeArray[i] = 0;
    }
 
  } else {
    analogWrite(RPLIDAR_MOTOR, 0); //stop the rplidar motor
   
    // try to detect RPLIDAR...
    rplidar_response_device_info_t info;
    if (IS_OK(lidar.getDeviceInfo(info, 100))) {
       // detected...
       lidar.startScan();
       
       // start motor rotating at max allowed speed
       analogWrite(RPLIDAR_MOTOR, 255);
    }
  }
}
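
For reference, the same farthest-angle idea can be written with no delay() calls at all: keep a running best distance and angle while points stream in, and move the steering servo once per batch of readings. The sketch below is only an illustration built from the calls already used above (waitPoint(), getCurrentPoint(), getDeviceInfo(), startScan()); the batch size POINTS_PER_PASS and the variable names are made-up placeholders, and the drive-motor/ESC handling is left out to keep it short.

    #include <RPLidar.h>
    #include <Servo.h>

    RPLidar lidar;
    Servo steer;
    #define RPLIDAR_MOTOR 3

    const int POINTS_PER_PASS = 360;  // placeholder batch size, roughly one revolution
    int   pointCount   = 0;
    float bestDistance = 0;
    float bestAngle    = 90;          // steer straight until a better reading arrives

    void setup() {
      lidar.begin(Serial);            // bind the lidar to the hardware serial port
      steer.attach(9);
      pinMode(RPLIDAR_MOTOR, OUTPUT);
    }

    void loop() {
      if (IS_OK(lidar.waitPoint())) {
        float distance = lidar.getCurrentPoint().distance;
        float angle    = lidar.getCurrentPoint().angle;

        // front 180 degrees only; remember the farthest reading so far
        if (angle < 180 && distance > bestDistance) {
          bestDistance = distance;
          bestAngle    = angle;
        }

        // once a full batch has been seen, steer once and start over -- no delay()
        if (++pointCount >= POINTS_PER_PASS) {
          steer.write(int(bestAngle));   // same raw angle-to-servo mapping as above
          pointCount   = 0;
          bestDistance = 0;
        }
      } else {
        // same recovery path as the original sketch: stop the motor, re-detect, rescan
        analogWrite(RPLIDAR_MOTOR, 0);
        rplidar_response_device_info_t info;
        if (IS_OK(lidar.getDeviceInfo(info, 100))) {
          lidar.startScan();
          analogWrite(RPLIDAR_MOTOR, 255);
        }
      }
    }

The 360-point batch is only a guess at roughly one revolution and would need tuning against the actual scan rate.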

All these delays will have a remarkably large effect on the "speed of the code".
Why are they in the code?

    delay(1000);
    steer.write(degreeArray[index]);

OK, thank you! But wouldn't the delay allow the scanner to reset? That's what the GitHub code does.

I've skimmed a bit, and some sources recommend the millis() function; could I incorporate that?

I have no idea what you mean by that, but you should only very rarely need delay().

OK, I'll get rid of them. Final question: is it possible to rearrange the code so it's easier to process? I only ask because when I tested retrieving elements on the serial monitor, it took well over 1 second per loop.

Do you think that would be better? If so, could you provide or explain how to implement it? Thank you.

Tutorial on using millis() to time events instead of delay().
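
As a rough illustration of that pattern (the variable names and the one-second interval below are placeholders, not values from the tutorial): the sketch remembers when it last moved the servo and only writes again once enough time has passed, so the rest of loop() keeps running in the meantime.

    #include <Servo.h>

    Servo steer;

    unsigned long lastSteerUpdate = 0;          // when the servo was last moved
    const unsigned long STEER_INTERVAL = 1000;  // placeholder: at most one update per second
    int targetAngle = 90;                       // whatever angle the scan logic picked

    void setup() {
      steer.attach(9);
    }

    void loop() {
      // ...scanning code runs here on every pass, never blocked by delay()...

      // non-blocking replacement for  delay(1000); steer.write(...);
      if (millis() - lastSteerUpdate >= STEER_INTERVAL) {
        lastSteerUpdate = millis();
        steer.write(targetAngle);
      }
    }

The subtraction millis() - lastSteerUpdate stays correct even when millis() eventually rolls over, which is why this pattern is preferred over comparing against an absolute end time.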
