So, having looked at the -E output of the compiles, I guess it has something to do with how the enum defines its results, and how that gets parsed here (it goes over my head in any case):
#define _GL_FLOAT_STRLEN_BOUND_L(t, pointlen) \
  (1 + _GL_##t##_PREC_BOUND + pointlen + 1 \
   + _GL_FLOAT_EXPONENT_STRLEN_BOUND (t##_MIN_10_EXP, t##_MAX_10_EXP))
#define FLT_STRLEN_BOUND_L(pointlen) _GL_FLOAT_STRLEN_BOUND_L ( FLT, pointlen)
#define DBL_STRLEN_BOUND_L(pointlen) _GL_FLOAT_STRLEN_BOUND_L ( DBL, pointlen)
#define LDBL_STRLEN_BOUND_L(pointlen) _GL_FLOAT_STRLEN_BOUND_L (LDBL, pointlen)
/* Looser bounds that are locale-independent and are integral constant
   expressions.  */
#define FLT_STRLEN_BOUND FLT_STRLEN_BOUND_L (MB_LEN_MAX)
#define DBL_STRLEN_BOUND DBL_STRLEN_BOUND_L (MB_LEN_MAX)
#define LDBL_STRLEN_BOUND LDBL_STRLEN_BOUND_L (MB_LEN_MAX)
/* Looser, locale-independent bounds that include the trailing null byte. */
#define FLT_BUFSIZE_BOUND ( FLT_STRLEN_BOUND + 1)
#define DBL_BUFSIZE_BOUND ( DBL_STRLEN_BOUND + 1)
#define LDBL_BUFSIZE_BOUND (LDBL_STRLEN_BOUND + 1)
#endif /* _GL_FTOASTR_H */
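For anyone following along: the ## operator pastes the type tag (FLT, DBL, LDBL) into the surrounding names, so FLT_STRLEN_BOUND ends up built from _GL_FLT_PREC_BOUND, FLT_MIN_10_EXP and FLT_MAX_10_EXP. Here is a minimal test program of mine (not part of coreutils) to print what the bounds evaluate to on a given build; it assumes gnulib's ftoastr.h is on the include path:

#include <stdio.h>
#include <limits.h>   /* MB_LEN_MAX, used by the *_STRLEN_BOUND macros */
#include "ftoastr.h"

int
main (void)
{
  /* The bounds are integral constant expressions, which is why
     od.c can use them directly as array sizes.  */
  printf ("FLT_BUFSIZE_BOUND  = %d\n", (int) FLT_BUFSIZE_BOUND);
  printf ("DBL_BUFSIZE_BOUND  = %d\n", (int) DBL_BUFSIZE_BOUND);
  printf ("LDBL_BUFSIZE_BOUND = %d\n", (int) LDBL_BUFSIZE_BOUND);
  return 0;
}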
Then in od.c we have this:
#define PRINT_FLOATTYPE(N, T, FTOASTR, BUFSIZE) \
  PRINT_FIELDS (N, T, fmt_string _GL_UNUSED, \
                char buf[BUFSIZE]; \
                FTOASTR (buf, sizeof buf, 0, 0, x); \
                xprintf ("%*s", adjusted_width, buf))
PRINT_TYPE (print_s_char, signed char)
PRINT_TYPE (print_char, unsigned char)
PRINT_TYPE (print_s_short, short int)
PRINT_TYPE (print_short, unsigned short int)
PRINT_TYPE (print_int, unsigned int)
PRINT_TYPE (print_long, unsigned long int)
PRINT_TYPE (print_long_long, unsigned_long_long_int)
PRINT_FLOATTYPE (print_float, float, ftoastr, FLT_BUFSIZE_BOUND)
PRINT_FLOATTYPE (print_double, double, dtoastr, DBL_BUFSIZE_BOUND)
PRINT_FLOATTYPE (print_long_double, long double, ldtoastr, LDBL_BUFSIZE_BOUND)
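So each PRINT_FLOATTYPE line stamps out a print function whose buffer is sized by the matching *_BUFSIZE_BOUND, and FTOASTR fills that buffer. Stand-alone, the call shape in the macro body looks like this (a sketch of mine using gnulib's ftoastr directly, outside od.c; the two zeros are the flags and width arguments, as in the macro above):

#include <stdio.h>
#include "ftoastr.h"

int
main (void)
{
  /* Same pattern as the macro body: a buffer sized by the bound,
     filled by ftoastr with flags == 0 and width == 0.  */
  char buf[FLT_BUFSIZE_BOUND];
  float x = 0.1f;
  ftoastr (buf, sizeof buf, 0, 0, x);
  printf ("%s\n", buf);   /* shortest string that converts back to x */
  return 0;
}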
In od.i, the first of those PRINT_FLOATTYPE lines then expands to the following (only that one is shown, for readability):