X-Git-Url: http://git.linex4red.de/pub/USBasp.git/blobdiff_plain/b1528a12565c214f34e4e26b5032980780ae4d24..b35f93a372d8c872aebab4e3626bfc58d85b84d7:/LUFA/Common/Common.h

diff --git a/LUFA/Common/Common.h b/LUFA/Common/Common.h
index e24906d4d..1d81d481e 100644
--- a/LUFA/Common/Common.h
+++ b/LUFA/Common/Common.h
@@ -84,19 +84,15 @@
             #define ARCH_LITTLE_ENDIAN
 
             #include "Endianness.h"
-        #elif (ARCH == ARCH_UC3B)
+        #elif (ARCH == ARCH_UC3)
             #include <avr32/io.h>
 
             // === TODO: Find abstracted way to handle these ===
             #define ISR(Name)                void Name (void) __attribute__((__interrupt__)); void Name (void)
-            #define EEMEM
             #define PROGMEM                  const
             #define ATOMIC_BLOCK(x)          if (1)
             #define ATOMIC_RESTORESTATE
             #define pgm_read_byte(x)         *x
-            #define eeprom_read_byte(x)      *x
-            #define eeprom_update_byte(x, y) *x = y
-            #define eeprom_write_byte(x, y)  *x = y
             #define _delay_ms(x)
             #define memcmp_P(...)            memcmp(__VA_ARGS__)
             #define memcpy_P(...)            memcpy(__VA_ARGS__)
@@ -218,6 +214,12 @@
          */
         #define GCC_FORCE_POINTER_ACCESS(StructPtr) __asm__ __volatile__("" : "=b" (StructPtr) : "0" (StructPtr))
 
+        /** Forces GCC to create a memory barrier, ensuring that memory accesses are not reordered past the barrier point.
+         *  This can be used before ordering-critical operations, to ensure that the compiler does not re-order the resulting
+         *  assembly output in an unexpected manner on sections of code that are ordering-specific.
+         */
+        #define GCC_MEMORY_BARRIER()                __asm__ __volatile__("" ::: "memory");
+
     /* Inline Functions: */
         /** Function to reverse the individual bits in a byte - i.e. bit 7 is moved to bit 0, bit 6 to bit 1,
          *  etc.
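
For readers unfamiliar with compiler barriers, here is a minimal usage sketch of the GCC_MEMORY_BARRIER() macro added in the second hunk. It is not taken from the LUFA sources: only the macro definition is copied from the diff above so the snippet compiles on its own, while ProduceSample(), SampleReady and SampleBuffer are hypothetical names invented for illustration.

    /* Hypothetical producer: fills a buffer, then publishes it via a flag that an
     * interrupt handler or the main loop polls. Without the barrier, GCC is free
     * to reorder the (non-volatile) buffer stores past the volatile flag store.
     */
    #include <stdint.h>
    #include <stdbool.h>
    #include <string.h>

    /* Definition copied from the diff above; the empty asm statement with a
     * "memory" clobber emits no instructions, it only stops the compiler from
     * caching or reordering memory accesses across this point (it is not a
     * hardware barrier).
     */
    #define GCC_MEMORY_BARRIER()                __asm__ __volatile__("" ::: "memory");

    static volatile bool SampleReady;      /* hypothetical flag checked elsewhere */
    static uint8_t       SampleBuffer[8];  /* hypothetical shared buffer          */

    void ProduceSample(const uint8_t* const Data)
    {
        memcpy(SampleBuffer, Data, sizeof(SampleBuffer));

        /* Ordering-critical point: the buffer contents must be complete in memory
         * before the flag announcing them is set.
         */
        GCC_MEMORY_BARRIER();

        SampleReady = true;
    }

On the single-core AVR and UC3 targets LUFA runs on, a pure compiler barrier like this is normally sufficient to keep main-loop/ISR hand-offs in order; hardware memory barriers are a separate concern.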