#ifndef __UM_CACHE_H
#define __UM_CACHE_H

#if defined(CONFIG_UML_X86) && !defined(CONFIG_64BIT)
# define L1_CACHE_SHIFT		(CONFIG_X86_L1_CACHE_SHIFT)
#elif defined(CONFIG_UML_X86) /* 64-bit */
# define L1_CACHE_SHIFT		6 /* Should be 7 on Intel */
#else
/* XXX: this was taken from x86, now it's completely random. Luckily only
 * affects SMP padding. */
# define L1_CACHE_SHIFT		5
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#endif
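
/*
 * Illustrative sketch only, not part of the original header: as the XXX
 * comment above notes, L1_CACHE_SHIFT matters here mainly for SMP padding,
 * i.e. spacing hot per-CPU data out to full cache lines so two CPUs never
 * bounce the same line back and forth (false sharing). The struct below is
 * a hypothetical consumer of L1_CACHE_BYTES, invented for illustration:
 */
struct hot_counter {
	unsigned long count;
	/* pad the struct out to exactly one L1 cache line */
	char pad[L1_CACHE_BYTES - sizeof(unsigned long)];
} __attribute__((__aligned__(L1_CACHE_BYTES)));
/*
 * With the fallback L1_CACHE_SHIFT of 5, L1_CACHE_BYTES is (1 << 5) == 32,
 * so each element of a per-CPU array of struct hot_counter occupies its
 * own 32-byte line; if the guessed shift is smaller than the real cache
 * line, the padding is merely less effective, not incorrect.
 */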