Guys I am having the mother of all mental blocks.
I am trying to base64 encode a string with this code:
void CSerialManager::encodeASCII(CString &strData)
{
CBuff<128> buffEncoded;
CBuff<4> buffHex;
CString strEncoded(buffEncoded), strHex(buffHex);
uint8_t nHex = 0;
debug.log(F(""));
for (uint8_t nI = 0; nI < strData.length(); nI++)
{
if (isAlphaNumeric(strData[nI]))
{
if (isUpperCase(strData[nI]))
{
nHex = uint8_t(strData[nI]) - uint8_t('A');
}
else if (isLowerCase(strData[nI]))
{
nHex = uint8_t(strData[nI]) - uint8_t('a') + 26;
}
else if (isDigit(strData[nI]))
{
nHex = uint8_t(strEncoded[nI]) - uint8_t('0') + 52;
}
strHex.format("%02X", nHex);
strEncoded += strHex;
debug.log(strData[nI]);
debug.log(uint8_t(strData[nI]) - uint8_t('0') + 52);
debug.log(nHex);
debug.log(strEncoded);
debug.log(F("-------"));
}
else
{
strEncoded += strData[nI];
}
debug.log(strData[nI]);
debug.log(strEncoded);
debug.log(F("-------"));
}
strData = strEncoded;
}
Here is my debug output in Serial monitor:
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
Serial Communication Out
Sending to HC-05 = 1/6
1.........debug.log(strData[nI]); from "1/6"
53.......debug.log(uint8_t(strData[nI]) - uint8_t('0') + 52) CORRECT FROM THE BASE64 ALPHABET
4.........debug.log(nHex); HOW DID 53 decimal end up as 4 decimal?
04.......debug.log(strEncoded);
-------.. debug.log(F("-------"));
1
04
/
04/
6
58
51
04/33
6
04/33
~
04/33~
04/33~
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
1/6 is the input string.
I am using the base64 alphabet from here:
https://www.garykessler.net/library/base64.html
53 decimal is the result of converting an ascii '1' to a base64 alphabetic string using the above table.
And it is correct - 53 is the code for ASCII '1' in base64
What I can't get my head around is HOW 53 decimal ends of as 4 decimal with this
nHex = uint8_t(strEncoded[nI]) - uint8_t('0') + 52;