I thought it might be fun to write a Morse decoder for the Arduino (it uses an LDR, but that's not particularly important).
Here is my code:
#define DOT '.'   // printable symbol for a short mark (dot)
#define DASH '-'  // printable symbol for a long mark (dash)
#define UNIT_TIME_MS 100  // length of one Morse time unit, in milliseconds
int BASE_LIGHT_LEVEL;  // ambient baseline reading, set once in setup() via calibrate_sensor()
const int LIGHT_SENSITIVITY = 200;  // how far below the baseline a reading must drop to count as "light on"
int pin = 0;  // analog pin the light sensor (LDR) is read from
// One-time initialization: open the serial port, measure the ambient light
// baseline while the transmitter is dark, and echo the baseline for debugging.
void setup()
{
  Serial.begin(115200);
  BASE_LIGHT_LEVEL = calibrate_sensor(pin);
  Serial.println(BASE_LIGHT_LEVEL); // so the calibration can be sanity-checked
}
String test; // NOTE(review): unused in the visible code — looks like a leftover; confirm before removing
void loop() {
if (get_light_level(pin,BASE_LIGHT_LEVEL))
listen_morse();
}
// Decode the incoming Morse stream forever, printing one decoded character
// per iteration. Entered as soon as loop() sees the first light pulse.
// NOTE(review): the initial 60 ms delay phase-shifts all subsequent samples
// (one per UNIT_TIME_MS inside nextChar) so they land ~60% of the way into
// each 100 ms time unit. Presumably the extra margin also absorbs the
// detection latency of loop() and the time Serial.println takes between
// samples — if so, a smaller offset (e.g. 50 ms) could push samples across
// a unit boundary once that latency is added. TODO: confirm with timing
// measurements before changing this constant.
void listen_morse()
{
delay (60); // read 60ms into each time unit
while(true)
{
char a = nextChar();
Serial.println(a);
}
}
// Accumulate one raw on/off sample per time unit ('1' = light, '0' = dark)
// and return a decoded character as soon as a letter gap ("0001" / trailing
// "0000") or a word gap (seven consecutive '0's) is recognized. Blocks for
// UNIT_TIME_MS between samples.
//
// Buffer sizing fix: the longest Morse character (a digit such as "-----")
// samples as 19 units, plus up to 4 trailing gap units = 23 bytes + NUL.
// The previous 21-byte buffer was overrun by digits/prosigns (writing
// buffer[21] out of bounds); the buffer is enlarged and guarded.
char nextChar()
{
  static char buffer [32] = "";
  while (true)
  {
    int len = strlen(buffer);
    if (len >= (int) sizeof (buffer) - 1) // overflow guard for garbled input
    {
      buffer [0] = '\0'; // drop the unparseable run rather than corrupt memory
      len = 0;
    }
    buffer [len] = get_light_level(pin,BASE_LIGHT_LEVEL) + '0'; // bool -> '0'/'1'
    buffer [len + 1] = '\0';
    delay (UNIT_TIME_MS); // one sample per Morse time unit
    if (strcmp (buffer,"0000000") == 0) // seven silent units = word gap
    {
      buffer[0] = '\0';
      return ' ';
    }
    if (strstr(buffer,"0001")) // three silent units then light = letter gap
    {
      buffer [strlen(buffer) - 4] = '\0'; // strip the "0001" tail before decoding
      char ascii = morse_to_ascii(raw_to_morse(buffer));
      buffer[0] = '1'; // the '1' just seen is the start of the next letter
      buffer[1] = '\0';
      return ascii;
    }
    else if (strstr(buffer,"0000") && strlen(strstr(buffer,"0000")) == 4 ) // letter ended; a word gap may still be forming
    {
      buffer [strlen(buffer) - 4] = '\0';
      char ascii = morse_to_ascii(raw_to_morse(buffer));
      buffer [4] = '\0';
      memset (buffer,'0',4); // re-seed "0000" so the 7-zero word gap can still complete
      return ascii;
    }
  }
}
// Convert a raw sample string (e.g. "10111" = on, off, on-on-on) into a
// dot/dash string (".-"). A lone '1' followed by a '0' separator (or end of
// input) is a dot; a "111" run is a dash.
// Returns a pointer to a static buffer, overwritten on each call — the
// caller must consume the result before calling again.
char* raw_to_morse (const char* raw_input)
{
  static char morse [10] = "";
  memset (morse,'\0',10); // reset at the beginning of each call
  size_t len = strlen (raw_input); // hoisted: was recomputed every loop iteration
  size_t out = 0;
  for (size_t i = 0; i < len; )
  {
    if (out >= sizeof (morse) - 1)
      break; // guard: garbled input (e.g. 10 dots) previously overran morse[10]
    if (raw_input [i + 1] == '0' || i + 1 == len) // lone '1' -> dot
    {
      morse [out++] = DOT;
      if (i + 1 >= len) // end of morse word
        break;
      i += 2; // skip the '0' separator to the next symbol
    }
    else // "111" -> dash
    {
      morse [out++] = DASH;
      if (i + 3 >= len) // end of morse word
        break;
      i += 4; // skip the dash and its '0' separator
    }
  }
  return morse;
}
// Look up a dot/dash string (e.g. ".-") and return its ASCII character.
// Returns '?' for sequences not in the table.
//
// Bug fix: the old final check was `if (strcmp(morse,".-.-."))` — missing
// `== 0` — so EVERY unknown sequence returned '+', while ".-.-." itself fell
// off the end of the function (undefined behavior). The digits the old
// `//numbers` placeholder promised are also filled in.
char morse_to_ascii (const char* morse)
{
  static const char* codes[] = {
    // letters a-z
    ".-", "-...", "-.-.", "-..", ".", "..-.", "--.", "....", "..",
    ".---", "-.-", ".-..", "--", "-.", "---", ".--.", "--.-", ".-.",
    "...", "-", "..-", "...-", ".--", "-..-", "-.--", "--..",
    // digits 0-9
    "-----", ".----", "..---", "...--", "....-",
    ".....", "-....", "--...", "---..", "----.",
    // prosigns
    ".-.-."  // AR (end of message), rendered as '+'
  };
  static const char ascii[] = "abcdefghijklmnopqrstuvwxyz0123456789+";
  for (unsigned int i = 0; i < sizeof (codes) / sizeof (codes[0]); i++)
  {
    if (strcmp (morse, codes[i]) == 0)
      return ascii[i];
  }
  return '?'; // unknown / garbled sequence
}
// True when the current analog reading has dropped more than
// LIGHT_SENSITIVITY below the calibrated baseline, i.e. "light on".
// pin        : analog pin the sensor is wired to
// base_level : ambient baseline captured by calibrate_sensor()
// Note: the parameter was renamed — it previously shadowed the global
// BASE_LIGHT_LEVEL of the same name (flagged by -Wshadow), and dead
// commented-out debug prints were removed.
bool get_light_level (byte pin, int base_level)
{
  return (base_level - analogRead(pin)) > LIGHT_SENSITIVITY;
}
// Sample the ambient light level several times and return the average,
// used as the baseline that get_light_level() compares against.
// pin: analog pin to read. Blocks for ~SAMPLE_COUNT * 100 ms.
int calibrate_sensor (int pin)
{
  const int SAMPLE_COUNT = 3; // was a magic number duplicated in the loop bound and divisor
  long total_light_val = 0;   // headroom in case SAMPLE_COUNT grows on 16-bit int targets
  for (int i = 0; i < SAMPLE_COUNT; i++)
  {
    total_light_val += analogRead (pin);
    delay (100); // let the reading settle between samples
  }
  return total_light_val / SAMPLE_COUNT; // average base light level
}
It still needs a bit of work but seems to fundamentally work except for one strange issue.
The delay in listen_morse (60 ms) is designed to offset the read a certain way through each 100 ms time unit. However, if I change it to 50 ms, for example, the decoding seems to break. For the life of me I can't understand why this is — I could understand it if it were equal to or greater than 100 ms.
Any ideas on what's going on here?
Thanks