/*
    m5threads, a pthread library for the M5 simulator
    Copyright (C) 2009, Stanford University

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public
    License as published by the Free Software Foundation; either
    version 2.1 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/

#ifndef __SPINLOCK_SPARC_H__
#define __SPINLOCK_SPARC_H__
// routines from /usr/src/linux/include/asm-sparc/spinlock_64.h
// Note: these work even with RMO, but a few barriers could be eliminated for TSO
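
// spin_lock: ldstub atomically reads the lock byte and sets it to 0xff,
// returning the previous value in %0. A zero result means the lock was free
// and is now held; otherwise branch to a read-only spin loop (kept off the
// fast path in .subsection 2) that waits for the byte to read zero before
// retrying the ldstub. The membar provides acquire ordering under RMO.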
static __inline__ void spin_lock(volatile int* lock)
{
    unsigned long tmp;

    __asm__ __volatile__(
        "1:  ldstub      [%1], %0\n"
        "    membar      #StoreLoad | #StoreStore\n"
        "    brnz,pn     %0, 2f\n"
        "     nop\n"
        "    .subsection 2\n"
        "2:  ldub        [%1], %0\n"
        "    membar      #LoadLoad\n"
        "    brnz,pt     %0, 2b\n"
        "     nop\n"
        "    ba,a,pt     %%xcc, 1b\n"
        "    .previous"
        : "=&r" (tmp)
        : "r" (lock)
        : "memory");
}
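
// trylock: a single ldstub attempt with no spinning. Returns nonzero if the
// lock byte was previously zero (i.e., the lock was just acquired) and zero
// otherwise; the membar orders the acquiring store before later accesses.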
static __inline__ int trylock(volatile int* lock)
{
    unsigned long result;

    __asm__ __volatile__(
        "    ldstub      [%1], %0\n"
        "    membar      #StoreLoad | #StoreStore"
        : "=r" (result)
        : "r" (lock)
        : "memory");

    return (result == 0);
}
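
// spin_unlock: the membar orders all earlier loads and stores before the
// store of %g0 (zero) that releases the lock, giving release ordering
// under RMO.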
static __inline__ void spin_unlock(volatile int* lock)
{
    __asm__ __volatile__(
        "    membar      #StoreStore | #LoadStore\n"
        "    stb         %%g0, [%0]"
        : // No outputs
        : "r" (lock)
        : "memory");
}
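
/*
  Usage sketch (illustrative, not part of the original header): one way a
  caller might protect a shared counter with these primitives. The names
  counter_lock, shared_counter and increment_counter are assumptions made up
  for this example; a lock variable must start out zeroed (unlocked).

      static volatile int counter_lock = 0;
      static int shared_counter = 0;

      static void increment_counter(void)
      {
          spin_lock(&counter_lock);      // spins until acquired
          shared_counter++;              // critical section
          spin_unlock(&counter_lock);    // release
      }

      // Non-blocking attempt:
      //   if (trylock(&counter_lock)) {
      //       shared_counter++;
      //       spin_unlock(&counter_lock);
      //   }
*/
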
#endif // __SPINLOCK_SPARC_H__