X-Git-Url: http://git.linex4red.de/pub/USBasp.git/blobdiff_plain/f301a4a21f34bea6c41872258a9dab3263f04e2a..f6f4ac588cec25ad7f9baa67fc3c22c9306f962e:/LUFA/Common/Common.h

diff --git a/LUFA/Common/Common.h b/LUFA/Common/Common.h
index c2eeeade5..b7d564aa3 100644
--- a/LUFA/Common/Common.h
+++ b/LUFA/Common/Common.h
@@ -53,9 +53,7 @@
 #define __LUFA_COMMON_H__
 
 	/* Macros: */
-		#if !defined(__DOXYGEN__)
-			#define __INCLUDE_FROM_COMMON_H
-		#endif
+		#define __INCLUDE_FROM_COMMON_H
 
 	/* Includes: */
 		#include
@@ -84,7 +82,7 @@
 			#define ARCH_LITTLE_ENDIAN
 
 			#include "Endianness.h"
-		#elif (ARCH == ARCH_UC3B)
+		#elif (ARCH == ARCH_UC3)
 			#include
 
 			// === TODO: Find abstracted way to handle these ===
@@ -214,6 +212,12 @@
 			 */
 			#define GCC_FORCE_POINTER_ACCESS(StructPtr)   __asm__ __volatile__("" : "=b" (StructPtr) : "0" (StructPtr))
 
+			/** Forces GCC to create a memory barrier, ensuring that memory accesses are not reordered past the barrier point.
+			 *  This can be used before ordering-critical operations, to ensure that the compiler does not re-order the resulting
+			 *  assembly output in an unexpected manner on sections of code that are ordering-specific.
+			 */
+			#define GCC_MEMORY_BARRIER()                  __asm__ __volatile__("" ::: "memory");
+
 	/* Inline Functions: */
 		/** Function to reverse the individual bits in a byte - i.e. bit 7 is moved to bit 0, bit 6 to bit 1,
 		 *  etc.
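
For context on the GCC_MEMORY_BARRIER() macro added above, the following is a minimal usage sketch, not part of the diff or of LUFA itself: the macro body mirrors the definition introduced by the diff, while the surrounding names (DataReady, SharedBuffer, ProduceData) are hypothetical and only serve to illustrate the ordering problem the barrier addresses.

/* Usage sketch for a compiler memory barrier of the kind added in this diff.
 * DataReady, SharedBuffer and ProduceData are invented for illustration.     */
#include <stdbool.h>
#include <stdint.h>

#define GCC_MEMORY_BARRIER()    __asm__ __volatile__("" ::: "memory");

static volatile bool DataReady = false;
static uint8_t       SharedBuffer[8];

void ProduceData(void)
{
	SharedBuffer[0] = 0x55;   /* Fill the shared buffer first...              */
	GCC_MEMORY_BARRIER();     /* ...then stop GCC from reordering the buffer
	                           * write past the flag update that follows.     */
	DataReady = true;         /* Publish the "data ready" flag last.          */
}

The empty inline assembly statement with a "memory" clobber emits no instructions; it only tells the compiler that memory may have changed, so loads and stores are not moved across the barrier point. It constrains compiler ordering only, not hardware ordering, which is the relevant concern on the single-core AVR8 and UC3 targets this header supports.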