straight-line code goes into a loop

Here is my very simple sketch:

#include "microclock.h"

MicroClock uclock;

void setup() {

  Serial.begin(115200);
  delay(1000);
  while (!Serial);

  TimeVal t = uclock.now();

}

void loop() {  }

The microclock.h header file is:

#ifndef MICROCLOCK_H
#define MICROCLOCK_H

#include "timeval.h"
#include <assert.h>
#include <stdint.h>

struct MicroClock {

  TimeVal timeNow;
  uint32_t lastMicros;

  // Returns differeince in usec times (t0 earlier than t1).
  inline uint32_t udt(uint32_t t0, uint32_t t1)
    { if (t0 < t1) return (t1 - t0); else return ((0xffffffff - t0) + t1); }

  TimeVal now() {
    uint32_t microsNow = micros();
Serial.println('a');
    uint32_t dt = udt(lastMicros, microsNow);
Serial.println('b');
    // assert(0 <= dt && dt < 1000000);
    lastMicros = microsNow;
Serial.println('c');
    // assert(0 <= dt && dt < 1000000);
    timeNow += dt;
Serial.println('d');
    return timeNow;
  }

  MicroClock() {
    this->lastMicros = micros();
    this->timeNow = u2tv(this->lastMicros);
  }

};

#endif

and here is the timeval.h header:

#ifndef TIMEVAL_H
#define TIMEVAL_H


struct TimeVal {
  long int sec;
  long int usec;
};
/*
 * stolen from C++CSP2, which in turn stole the macros
 * from linux glibc sys/time.h
 */
#define a_timercmp(a, b, CMP)      \
        (((a).sec == (b).sec) ?    \
        ((a).usec CMP (b).usec) :  \
        ((a).sec CMP (b).sec))

inline bool operator <(const TimeVal& a,const TimeVal& b)
        {return a_timercmp(a,b,<);};
inline bool operator >(const TimeVal& a,const TimeVal& b)
        {return a_timercmp(a,b,>);};
inline bool operator <=(const TimeVal& a,const TimeVal& b)
        {return a_timercmp(a,b,<=);};
inline bool operator >=(const TimeVal& a,const TimeVal& b)
        {return a_timercmp(a,b,>=);};
inline bool operator ==(const TimeVal& a,const TimeVal& b)
        {return a_timercmp(a,b,==);};
inline bool operator !=(const TimeVal& a,const TimeVal& b)
        {return ! ( a_timercmp(a,b,==) );};


#define a_timeradd(a, b, result)                     \
        do {                                         \
                (result).sec = (a).sec + (b).sec;    \
                (result).usec = (a).usec + (b).usec; \
                if ((result).usec >= 1000000)        \
                {                                    \
                        ++(result).sec;              \
                        (result).usec -= 1000000;    \
                }                                    \
        } while (0)

#define a_timersub(a, b, result)                     \
        do {                                         \
                (result).sec = (a).sec - (b).sec;    \
                (result).usec = (a).usec - (b).usec; \
                if ((result).usec < 0) {     \
                        --(result).sec;              \
                        (result).usec += 1000000;    \
                }                                    \
        } while (0)


inline void operator +=(TimeVal& a,const TimeVal& b)
        {TimeVal r; a_timeradd(a,b,r); a = r;};
inline void operator -=(TimeVal& a,const TimeVal& b)
        {TimeVal r; a_timersub(a,b,r); a = r;};

inline TimeVal operator +(const TimeVal& a,const TimeVal& b)
        {TimeVal r; a_timeradd(a,b,r); return r;};
inline TimeVal operator -(const TimeVal& a,const TimeVal& b)
        {TimeVal r; a_timersub(a,b,r); return r;};

/** these last five are my fault   m.e.g. */

inline TimeVal u2tv(unsigned long u)
        {TimeVal r; r.sec=u/1000000; r.usec=u-1000000*r.sec; return r;}

inline TimeVal operator +=(TimeVal& a, const unsigned long u)
        {TimeVal r,b=u2tv(u); a_timeradd(a,b,r); a = r;}
inline TimeVal operator -=(TimeVal& a, const unsigned long u)
        {TimeVal r,b=u2tv(u); a_timersub(a,b,r); a = r;}

inline TimeVal operator +(TimeVal& a, const unsigned long u)
        {TimeVal r,b=u2tv(u); a_timeradd(a,b,r); return r;}
inline TimeVal operator -(TimeVal& a, const unsigned long u)
        {TimeVal r,b=u2tv(u); a_timersub(a,b,r); return r;}

#undef a_timercmp
#undef a_timeradd
#undef a_timersub


/////////////////////////////////////////////////////
// Note that -100000 < usec < 1000000 in TImeVal.
// Times may be negative.
// The signs of sec and usec need not match.
/////////////////////////////////////////////////////

#endif

The way I have it instrumented, the call to the now() method of MicroClock ought to produce the printout

a
b
c

What is actually produces is

a
b
c
b
c
b
c
b
c
b
c
b
c
..and so on forever..

That is, it produces a single 'a' followed by an infinite sequence of alternating 'b' and 'c'.

I'm using version 1.8.13 of the Arduino IDE, 64-bit Linux version, running on Ubuntu 16.04. The hardware is an Adafruit Feather M0, which has a Cortex-M0 processor.

I'll be grateful to anyone who can explain this behavior.

i get the following when running you posted code on an uno, which I believe is correct

a
b
c
d

I am not sure where you got that library, but this function in microclock.h is wrong

 // Returns differeince in usec times (t0 earlier than t1).
  inline uint32_t udt(uint32_t t0, uint32_t t1)
    { if (t0 < t1) return (t1 - t0); else return ((0xffffffff - t0) + t1); }

When the timer rolls over such that t0 > t1, it is off by 1. Granted, not a very likely case and not a huge error.

I googled for this library and did not find anything.

Thanks for pointing out the rollover error.

I got the timeval.h file from the C++CSP2 project (C++CSP2).

The MicroClock.now() function ran for months until I replaced it. That was with a previous version of the IDE (1.8.9 I believe). Now I'm using 1.8.13.

Maybe I should just try to drop back to the old IDE...

i ran your code with 1.8.13