---
 mm/slub.c | 29 +++++++++++++++++++++--------
 1 file changed, 21 insertions(+), 8 deletions(-)

Index: linux-2.6/mm/slub.c
===================================================================
--- linux-2.6.orig/mm/slub.c	2007-10-25 19:55:55.000000000 -0700
+++ linux-2.6/mm/slub.c	2007-10-25 19:56:52.000000000 -0700
@@ -1171,7 +1171,7 @@ static void discard_slab(struct kmem_cac
 	free_slab(s, page);
 }
 
-#ifdef CONFIG_FAST_CMPXCHG_LOCAL
+#ifdef __HAVE_ARCH_CMPXCHG
 /*
  * Per slab locking using the pagelock
  */
@@ -1199,6 +1199,23 @@ static __always_inline unsigned long sla
 	__acquire(bitlock);
 	return state;
 }
+
+static __always_inline unsigned long slab_lock(struct page *page)
+{
+	unsigned long state;
+
+	preempt_disable();
+#ifdef CONFIG_SMP
+	do {
+#endif
+		state = page->flags & ~LOCKED;
+#ifdef CONFIG_SMP
+	} while (cmpxchg(&page->flags, state, state | LOCKED) != state);
+#endif
+	__acquire(bitlock);
+	return state;
+}
+
 #else
 
 static __always_inline void slab_unlock(struct page *page, unsigned long state)
@@ -1213,17 +1230,13 @@ static __always_inline unsigned long sla
 		return 0;
	return page->flags;
 }
-#endif
 
 static __always_inline unsigned long slab_lock(struct page *page)
 {
-	unsigned long state;
-
-	do {
-		state = slab_trylock(page);
-	} while (!state);
-	return state;
+	bit_spin_lock(PG_locked, &page->flags);
+	return page->flags;
 }
+#endif
 
 /*
  * Management of partially allocated slabs
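
For reference, the heart of the new slab_lock() above is an ordinary compare-and-swap loop on the page flags word: read the word with the lock bit clear, try to install the same word with the lock bit set, and retry if another CPU changed the word in between. The fragment below is a minimal userspace sketch of that pattern, not part of the patch: it uses GCC's __sync_val_compare_and_swap() as a stand-in for the kernel's cmpxchg(), a file-scope variable and a made-up LOCKED bit in place of page->flags and PG_locked, and it leaves out the preempt_disable()/CONFIG_SMP handling and the sparse __acquire() annotation.

#include <stdio.h>

#define LOCKED	(1UL << 0)	/* stand-in for the PG_locked bit */

static unsigned long flags;	/* stand-in for page->flags */

/* Acquire: the same cmpxchg loop as the patched slab_lock(). */
static unsigned long example_lock(void)
{
	unsigned long state;

	do {
		/* expected value: current flags with the lock bit clear */
		state = flags & ~LOCKED;
	} while (__sync_val_compare_and_swap(&flags, state,
					     state | LOCKED) != state);
	return state;		/* pre-lock flags, handed back at unlock */
}

/* Release: store the saved word back, which also clears the lock bit. */
static void example_unlock(unsigned long state)
{
	__sync_synchronize();	/* release barrier */
	flags = state;
}

int main(void)
{
	unsigned long state = example_lock();

	printf("flags while held:   %#lx\n", flags);
	example_unlock(state);
	printf("flags after unlock: %#lx\n", flags);
	return 0;
}

Returning the pre-lock value and passing it back at release mirrors the two-argument slab_unlock(page, state) visible in the patch; presumably the unlock path can then drop the lock by simply writing the saved word back rather than atomically clearing the lock bit.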