Hello everyone,
I receive 2 bytes which should be converted to a long. To achieve this I bitshift the first value and add it to the last.
#include <Arduino.h>
// #include <Logger.h>
// bool DEBUGLOG = true;
void setup() {
Serial.begin(9600);
Serial.println("--- Binary Calculator ---");
Serial.println("LONG");
uint8_t v1 = 0x96;
uint8_t v2 = 0x01;
Serial.print("v1\t"); Serial.print(v1,DEC);Serial.print("\t");Serial.print(v1,HEX);Serial.print("\t");Serial.println(v1,BIN);
Serial.print("v2\t"); Serial.print(v2,DEC);Serial.print("\t");Serial.print(v2,HEX);Serial.print("\t");Serial.println(v2,BIN);
long v11 = 0 + (v1<<8);
long v22 = v2 & 0xFF;
Serial.print("v11\t"); Serial.print(v11,DEC);Serial.print("\t");Serial.print(v11,HEX);Serial.print("\t");Serial.println(v11,BIN);
Serial.print("v22\t"); Serial.print(v22,DEC);Serial.print("\t");Serial.print(v22,HEX);Serial.print("\t");Serial.println(v22,BIN);
Serial.println(" ");
long v = ((v1<<8) + ((v2)&0xFF));
// expected 1501, 00009601, 10010110 00000001
Serial.print("v\t"); Serial.print(v,DEC);Serial.print("\t");Serial.print(v,HEX);Serial.print("\t");Serial.println(v,BIN);
Serial.println(" ");
Serial.println(" ");
Serial.println("INT");
uint8_t v111 = 0x96;
uint8_t v222 = 0x01;
Serial.print("v111\t"); Serial.print(v111,DEC);Serial.print("\t");Serial.print(v111,HEX);Serial.print("\t");Serial.println(v111,BIN);
Serial.print("v222\t"); Serial.print(v222,DEC);Serial.print("\t");Serial.print(v222,HEX);Serial.print("\t");Serial.println(v222,BIN);
short v_int = ((v111<<8) + (v222&0xFF));
}
void loop() {
delay(1000);
}
Since long has 4 bytes I would expect:
00000000 00000000 10010110 00000001
but the result of the monitor is
11111111111111111001011000000001
even just using integer (2byte) should result in
10010110 00000001
but also gives
11111111 11111111 10010110 00000001
-27135 in Decimal -> expected 1501
I am unable to find the error.
