// 16-bit ADC readout sketch for the TI ADC161S626 (SPI, bipolar 2.5 V ref).
//
// NOTE: this header was bare text in the original file and would not compile;
// it has been converted to a comment.

#include <SPI.h>
#include <stdint.h>  // int16_t (also pulled in via Arduino.h; explicit for clarity)

const int spi_ss = 48;      // set SPI SS Pin
float v_out;             // decimal voltage
float vref = 2.5;       // voltage on Vref pin

union {
  long Dout;
  byte bytes[4];
} spi_bytes;


void setup() {
  // put your setup code here, to run once:
  Serial.begin(9600);          // begin serial and set speed
  pinMode(spi_ss, OUTPUT);     // Set SPI slave select pin as output
  digitalWrite(spi_ss, HIGH);  // Make sure spi_ss is held high
  SPI.beginTransaction(SPISettings(5000000, MSBFIRST, SPI_MODE3));  // set speed bit format and clock/data polarity while starting SPI transaction
  SPI.begin();                 // begin SPI
}

void loop() {

  digitalWrite(spi_ss, LOW);

  spi_bytes.Dout = 0;

  for (int i = 2; i >=0; --i) {
    spi_bytes.bytes[i] = SPI.transfer(0);
  }

  digitalWrite(spi_ss, HIGH); // wite LTC CS pin high to stop LTC from transmitting zeros.

  spi_bytes.Dout >>= 2;

  v_out = vref * (int(spi_bytes.Dout) / 32767.0);
  // finaly we recover the true value in volts. 1LSB = vref/32767
  // 15bits ADC = 2^15 - 1 = 32767 (signed 16 bit ADC)
  Serial.println(v_out, 3);
  delay(250);
}