Hey guys, can someone help me replace delay() with millis() in my code? I want to use it to count the number of days, and using delay() hangs my code.
#define LATCH 12
#define CLK 13
#define DATA 11

// Segment pattern for each digit 0-9, indexed by the digit's value.
// (The values are decimal, not hex; they get inverted with ~ before shifting out.)
int digitOne[10] = {192, 249, 164, 176, 153, 146, 130, 248, 128, 24};
int digitTwo[10] = {192, 249, 164, 176, 153, 146, 130, 248, 128, 24};

void setup() {
  pinMode(LATCH, OUTPUT);
  pinMode(CLK, OUTPUT);
  pinMode(DATA, OUTPUT);
}
void loop() {
  for (int i = 0; i < 10; i++) {        // tens digit
    for (int j = 0; j < 10; j++) {      // ones digit
      digitalWrite(LATCH, LOW);
      shiftOut(DATA, CLK, MSBFIRST, ~digitTwo[j]); // digitTwo (ones)
      shiftOut(DATA, CLK, MSBFIRST, ~digitOne[i]); // digitOne (tens)
      digitalWrite(LATCH, HIGH);
      delay(86400000UL); // one day in milliseconds -- this blocks everything
    }
  }
}
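
Here is a minimal non-blocking sketch of what a millis() version could look like, assuming you want the two-digit counter to advance once per day. The names DAY_MS, lastUpdate, dayCount, and showNumber() are my own helpers for illustration, not part of your original code:

#define LATCH 12
#define CLK 13
#define DATA 11

const unsigned long DAY_MS = 86400000UL; // one day in milliseconds

// Segment pattern for each digit 0-9 (same values as your arrays;
// one array is enough since both digits use the same patterns).
byte digits[10] = {192, 249, 164, 176, 153, 146, 130, 248, 128, 24};

unsigned long lastUpdate = 0; // millis() timestamp of the last digit change
int dayCount = 0;             // 0..99, shown on the two displays

// Push both digits of n out through the shift registers,
// ones digit first to match the order in your original loop.
void showNumber(int n) {
  digitalWrite(LATCH, LOW);
  shiftOut(DATA, CLK, MSBFIRST, ~digits[n % 10]); // ones digit
  shiftOut(DATA, CLK, MSBFIRST, ~digits[n / 10]); // tens digit
  digitalWrite(LATCH, HIGH);
}

void setup() {
  pinMode(LATCH, OUTPUT);
  pinMode(CLK, OUTPUT);
  pinMode(DATA, OUTPUT);
  showNumber(dayCount); // show 00 at power-up
}

void loop() {
  // Check how long it has been since the last update instead of blocking.
  // The unsigned subtraction stays correct across the millis() rollover.
  if (millis() - lastUpdate >= DAY_MS) {
    lastUpdate += DAY_MS;            // advance by a day, no drift accumulation
    dayCount = (dayCount + 1) % 100; // wrap after 99
    showNumber(dayCount);
  }
  // loop() returns immediately, so you can do other work here
}

The key idea is that loop() never blocks: every pass just checks whether a day has elapsed. The subtraction millis() - lastUpdate keeps working across the millis() rollover at about 49.7 days because both values are unsigned long, and incrementing lastUpdate by DAY_MS (rather than resetting it to millis()) avoids accumulating drift from late updates.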