JsonDocument memory declaration

Hi guys! I'm using a for loop to read a main file (saved in SPIFFS on my Wemos D1 mini) and split it into 10 smaller files, then I want to send each file over the internet through an HTTP request. Everything works great, except that when I send the files over HTTP I can see in Node-RED that the last object of each file is missing two values, AcZ and time (see the image below):

[Screenshot 2021-04-22 12.38.19: Node-RED debug output showing the last object without AcZ and time]

I think it's a problem with the memory declaration of the JsonDocument. I'm really new to C++ and ArduinoJson. Anyway, I read in the documentation that doc.clear() resets the memory pool but doesn't destroy it, and I think the problem is that I'm somehow duplicating something and exceeding the memory. Is this really the problem? Thanks in advance.

That's how I declared the capacity of the JsonDocument:

const size_t capacity = JSON_OBJECT_SIZE(1) + JSON_OBJECT_SIZE(4) + JSON_ARRAY_SIZE(1) + JSON_ARRAY_SIZE(10) + 10*JSON_OBJECT_SIZE(4) + 10;
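
From what I found in the docs, the document can also report at runtime how much of its pool is used and whether it ran out of space (overflowed() needs a fairly recent ArduinoJson 6, if I read the changelog right). So I guess a little helper like this would tell me whether the capacity above is really too small; I haven't tried it yet, just sketching the idea:

// Debug helper (my own sketch, not tested): print how full a document is.
// memoryUsage(), capacity() and overflowed() are JsonDocument methods in ArduinoJson 6.
void printDocUsage(const JsonDocument& d) {
  Serial.print(F("used "));
  Serial.print(d.memoryUsage());
  Serial.print(F(" of "));
  Serial.print(d.capacity());
  Serial.println(F(" bytes"));
  if (d.overflowed()) {
    Serial.println(F("JsonDocument overflowed -> some values were silently dropped"));
  }
}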

And this is the function I use to publish the values:

//------ HTTP Publish ------
void httpPublish(){

  const char * outputFileNames[] = {"/out1.txt", "/out2.txt", "/out3.txt", "/out4.txt", "/out5.txt", "/out6.txt", "/out7.txt", "/out8.txt", "/out9.txt", "/out10.txt"};
  const byte outputCount = sizeof outputFileNames / sizeof outputFileNames[0];
  byte outputIndex = 0;
  
  File sourceFile;
  File destinationFile;
  
  //Serial.println(capacity);
  

  for (byte idx = 0; idx < outputCount; idx++) {

      DynamicJsonDocument doc(capacity);
      DynamicJsonDocument globalDoc(capacity);
      StaticJsonDocument <1024> localDoc;
      String aLine;
      aLine.reserve(capacity);
      
      destinationFile = LittleFS.open(outputFileNames[idx], "r");
      if (!destinationFile) {
        Serial.print(F("can't open destination "));
        Serial.println(outputFileNames[idx]);
        break;
      } else {
        Serial.print("Reading: ");
        Serial.println(outputFileNames[idx]);
        //int lineCount = 0;
        while (destinationFile.available()) {
          aLine = destinationFile.readStringUntil('\n');
          DeserializationError error = deserializeJson(localDoc, aLine);
          if (!error) globalDoc.add(localDoc);  
          else{ Serial.println("Error Writing All files");}
        }//while

        JsonObject Info = doc.createNestedObject("Info");
        Info["Battery"] = battery;
        Info["ID"] = id;
        Info["Latitudine"] = latitudine;
        Info["Longitudine"] = longitudine;
    
        
        JsonArray Data = doc.createNestedArray("Data"); 
        Data.add(globalDoc);
    
        HTTPClient http;
        //Send request
        http.begin("http://raspi-hyperink:1880/postjdoc");
        char buffer[capacity];
        size_t n = serializeJson(doc, buffer);
        
        http.POST(buffer);
        Serial.println(buffer);
        http.end();
        destinationFile.close();
      }
    }// end for   
}//end httpPublish
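
One more side question: I used char buffer[capacity] for the output, but if I understood correctly the capacity is the size of the memory pool, not the length of the serialized text. Would it be safer to serialize into a String instead? Something like this (measureJson() is what the docs give for the output length, and HTTPClient::POST() accepts a String):

String payload;
payload.reserve(measureJson(doc) + 1);              // measureJson() = output length without the '\0'
serializeJson(doc, payload);                        // serializeJson() can write into a String
http.addHeader("Content-Type", "application/json"); // assuming Node-RED expects a JSON body
int httpCode = http.POST(payload);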

you could have continued the discussion in the previous thread

why do you allocate/instantiate the documents within the for loop?

Because that way I could destroy the memory pool, but as I said I'm not sure that's really the problem

can you try with something like this?

//------ HTTP Publish ------
void httpPublish() {

  const char * outputFileNames[] = {"/out1.txt", "/out2.txt", "/out3.txt", "/out4.txt", "/out5.txt", "/out6.txt", "/out7.txt", "/out8.txt", "/out9.txt", "/out10.txt"};
  const byte outputCount = sizeof outputFileNames / sizeof outputFileNames[0];

  File sourceFile;
  File destinationFile;

  DynamicJsonDocument doc(capacity);
  DynamicJsonDocument globalDoc(capacity);
  StaticJsonDocument <100> lineDoc; //  <<=== 100 should be enough for a line, shouldn't it??
  String aLine;
  aLine.reserve(100);//  <<=== 100 should be enough for a line, shouldn't it??

  for (byte idx = 0; idx < outputCount; idx++) {

    destinationFile = LittleFS.open(outputFileNames[idx], "r");
    if (!destinationFile) {
      Serial.print(F("can't open destination "));
      Serial.println(outputFileNames[idx]);
      break;
    } else {
      Serial.print("Reading: ");
      Serial.println(outputFileNames[idx]);
      doc.clear();
      globalDoc.clear();
      //int lineCount = 0;
      while (destinationFile.available()) {
        aLine = destinationFile.readStringUntil('\n');
        DeserializationError error = deserializeJson(lineDoc, aLine);
        if (!error) globalDoc.add(lineDoc);
        else {
          Serial.println("Error Writing All files");
        }
      } //while

      JsonObject Info = doc.createNestedObject("Info");
      Info["Battery"] = battery;
      Info["ID"] = id;
      Info["Latitudine"] = latitudine;
      Info["Longitudine"] = longitudine;

      JsonArray Data = doc.createNestedArray("Data");
      Data.add(globalDoc);

      HTTPClient http;
      //Send request
      http.begin("http://raspi-hyperink:1880/postjdoc");
      char buffer[capacity];
      size_t n = serializeJson(doc, buffer, capacity); // <<=== tell serializeJson how big the buffer is

      http.POST(buffer);
      Serial.println(buffer);
      http.end();
      destinationFile.close();
    }
  }// end for
}//end httpPublish
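
unrelated to the JSON part, but I would also check what http.POST() returns instead of discarding it, something like:

int httpCode = http.POST(buffer);
if (httpCode > 0) {
  Serial.printf("POST -> HTTP %d\n", httpCode);                              // server answered
} else {
  Serial.printf("POST failed: %s\n", http.errorToString(httpCode).c_str());  // connection error
}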

how big is a typical line?

Still got the same problem: the 9th object is still missing the AcZ and time values, even though they are present in the main file.

[Screenshot 2021-04-22 12.38.19, same image as above]

A line looks like this, so aLine.reserve(100) is enough:

{"AcX":16572,"AcY":972,"AcZ":2468,"time":1619019039}
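
then my guess is that the capacity is simply a bit too small. If I read the ArduinoJson docs right, the AcX/AcY/AcZ/time keys get copied into the document's pool (they come from a String and from another document, not from string literals), and that extra string storage isn't in your formula. Assuming 10 lines per file, a rough estimate would look something like this; the ArduinoJson Assistant on arduinojson.org is the safer way to get the number:

const size_t linesPerFile    = 10;                 // assumption: 10 lines per output file
const size_t keyBytesPerLine = 4 + 4 + 4 + 5;      // "AcX", "AcY", "AcZ", "time" incl. '\0'

const size_t capacity =
      JSON_OBJECT_SIZE(2)                          // root: "Info" + "Data"
    + JSON_OBJECT_SIZE(4)                          // Info: Battery, ID, Latitudine, Longitudine
    + JSON_ARRAY_SIZE(1)                           // Data holds one nested array
    + JSON_ARRAY_SIZE(linesPerFile)                // the array of line objects
    + linesPerFile * JSON_OBJECT_SIZE(4)           // one 4-member object per line
    + linesPerFile * keyBytesPerLine               // the duplicated key strings
    + 64;                                          // slack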
