Fix bugs in the new VirtualSerialMassStorage demo (thanks to Martin Degelsegger).
diff --git a/LUFA/Common/Common.h b/LUFA/Common/Common.h
index e24906d..b7d564a 100644
@@ -53,9 +53,7 @@
 #define __LUFA_COMMON_H__
 
        /* Macros: */
-               #if !defined(__DOXYGEN__)
-                       #define __INCLUDE_FROM_COMMON_H
-               #endif
+               #define __INCLUDE_FROM_COMMON_H
                
        /* Includes: */
                #include <stdint.h>
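
The __INCLUDE_FROM_COMMON_H token lets LUFA's private sub-headers reject direct inclusion, and with the __DOXYGEN__ conditional removed it is now defined unconditionally, so documentation builds see the same include chain as regular builds. A sub-header such as Endianness.h guards itself roughly as below (a sketch; the exact error wording in the LUFA sources may differ):

    #if !defined(__INCLUDE_FROM_COMMON_H)
        #error Do not include this file directly. Include LUFA/Common/Common.h instead.
    #endif
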
                        
                        #define  ARCH_LITTLE_ENDIAN
                        #include "Endianness.h"
-               #elif (ARCH == ARCH_UC3B)
+               #elif (ARCH == ARCH_UC3)
                        #include <avr32/io.h>
 
                        // === TODO: Find abstracted way to handle these ===
                        #define ISR(Name)                void Name (void) __attribute__((__interrupt__)); void Name (void)
-                       #define EEMEM
                        #define PROGMEM                  const
                        #define ATOMIC_BLOCK(x)          if (1)
                        #define ATOMIC_RESTORESTATE
                        #define pgm_read_byte(x)         *x
-                       #define eeprom_read_byte(x)      *x
-                       #define eeprom_update_byte(x, y) *x = y
-                       #define eeprom_write_byte(x, y)  *x = y
                        #define _delay_ms(x)
                        #define memcmp_P(...)            memcmp(__VA_ARGS__)
                        #define memcpy_P(...)            memcpy(__VA_ARGS__)
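
The stubs above let portable code compile unchanged on UC3 parts, whose flash is directly addressable: PROGMEM decays to plain const, pgm_read_byte() to a pointer dereference, and ATOMIC_BLOCK() to an ordinary block, while the EEPROM stand-ins (which only dereferenced RAM pointers) are dropped. A minimal sketch of code written against these shims, assuming LUFA/Common/Common.h is included first (the names here are illustrative):

    #include <LUFA/Common/Common.h>

    static const char GreetingString[] PROGMEM = "LUFA"; /* flash on AVR8, plain const data on UC3 */

    uint8_t ReadGreetingByte(void)
    {
        uint8_t Byte;

        ATOMIC_BLOCK(ATOMIC_RESTORESTATE) /* interrupt-safe block on AVR8, plain "if (1)" on UC3 */
        {
            Byte = pgm_read_byte(&GreetingString[0]); /* LPM access on AVR8, direct read on UC3 */
        }

        return Byte;
    }
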
                         */
                        #define GCC_FORCE_POINTER_ACCESS(StructPtr) __asm__ __volatile__("" : "=b" (StructPtr) : "0" (StructPtr))
 
+                       /** Forces GCC to create a memory barrier, ensuring that memory accesses are not reordered past the barrier point.
+                        *  This can be used before ordering-critical operations, to ensure that the compiler does not reorder the
+                        *  resulting assembly output around sections of code whose ordering must be preserved.
+                        */
+                       #define GCC_MEMORY_BARRIER()                __asm__ __volatile__("" ::: "memory")
+
                /* Inline Functions: */
                        /** Function to reverse the individual bits in a byte - i.e. bit 7 is moved to bit 0, bit 6 to bit 1,
                         *  etc.
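
The new GCC_MEMORY_BARRIER() emits no instructions of its own: the empty asm statement with a "memory" clobber only stops GCC from caching values in registers or reordering memory accesses across that point, so it is a compiler barrier, not a hardware fence. A minimal usage sketch, assuming LUFA/Common/Common.h is included (the flag and buffer are illustrative, not part of this commit):

    #include <stdbool.h>
    #include <stdint.h>
    #include <LUFA/Common/Common.h>

    static uint8_t SharedBuffer[8];          /* data handed to another context, e.g. an ISR */
    static volatile bool DataReady = false;  /* flag that the other context polls */

    void PublishByte(const uint8_t Value)
    {
        SharedBuffer[0] = Value; /* non-volatile write the optimizer could otherwise sink past the flag */

        GCC_MEMORY_BARRIER();    /* forces the buffer write to be emitted before... */

        DataReady = true;        /* ...the flag update that makes the data visible */
    }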