use uint16_t instead of magic ifdefs, leaving support for bit type in there in case we support it some day

mrdudz
2020-07-21 23:59:05 +02:00
parent 6d518a61a5
commit 4a9c5ff63b
8 changed files with 41 additions and 121 deletions

@@ -6,6 +6,9 @@
 #include <stdio.h>
 #include <limits.h>
+#include <stdint.h>
+
+/* #define SUPPORT_BIT_TYPES */
 unsigned char success=0;
 unsigned char failures=0;
@@ -14,22 +17,10 @@ unsigned char dummy=0;
 #ifdef SUPPORT_BIT_TYPES
 bit bit0 = 0;
 #endif
-#ifdef SIZEOF_INT_16BIT
-#if defined(__LINUX__) || defined(LINUX)
-unsigned short aint0 = 0;
-unsigned short aint1 = 0;
-#else
-unsigned int aint0 = 0;
-unsigned int aint1 = 0;
-#endif
-#else
-unsigned int aint0 = 0;
-unsigned int aint1 = 0;
-#endif
+uint16_t aint0 = 0;
+uint16_t aint1 = 0;
 unsigned char uchar0 = 0;
 unsigned char uchar1 = 0;
 unsigned char uchar2 = 0;
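
Why the fixed-width type removes the ifdef chains: <stdint.h> guarantees that uint16_t is exactly 16 bits on every conforming compiler, so the test variables behave identically whether int is 16-bit (as on the cc65 targets) or 32-bit (as on a Linux host running the same test). The following standalone program is a minimal sketch illustrating this, not part of the commit; it only demonstrates the guaranteed width and wraparound.

/* Demonstrates that uint16_t has the same size and overflow
 * behavior regardless of the platform's native int width. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint16_t a = 0xFFFFu;  /* largest 16-bit value, independent of sizeof(int) */

    ++a;                   /* stored back into 16 bits: wraps to 0 everywhere */

    printf("sizeof(uint16_t) == %u, a == %u\n",
           (unsigned)sizeof(uint16_t), (unsigned)a);

    return (a == 0) ? 0 : 1;
}

With unsigned int, the wraparound above would happen at 0xFFFF on a cc65 target but at 0xFFFFFFFF on a typical Linux host, which is exactly the discrepancy the old SIZEOF_INT_16BIT / __LINUX__ ifdefs were papering over.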