x86: remove fastcall from include/asm-x86

Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Harvey Harrison 2008-01-30 13:31:17 +01:00 committed by Ingo Molnar
parent 75604d7f7f
commit 341d8854d3
4 changed files with 17 additions and 18 deletions
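
Background: on 32-bit x86 the fastcall annotation told gcc to pass the first
three integer arguments in registers rather than on the stack. The kernel is
now built with -mregparm=3 unconditionally, so that is already the default
calling convention and the annotation no longer changes the generated code;
this patch simply drops it from the asm-x86 headers. A rough sketch of what
the old i386 definitions expanded to (illustrative, not the exact header text):

/* up to three integer args in %eax, %edx, %ecx */
#define fastcall	__attribute__((regparm(3)))
/* asmlinkage forced all args onto the stack for asm-visible entry points */
#define asmlinkage	__attribute__((regparm(0)))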

@@ -59,17 +59,17 @@ extern unsigned boot_cpu_id;
 #define setup_secondary_clock setup_secondary_APIC_clock
 #endif
-static inline fastcall void native_apic_write(unsigned long reg, u32 v)
+static inline void native_apic_write(unsigned long reg, u32 v)
 {
 	*((volatile u32 *)(APIC_BASE + reg)) = v;
 }
-static inline fastcall void native_apic_write_atomic(unsigned long reg, u32 v)
+static inline void native_apic_write_atomic(unsigned long reg, u32 v)
 {
 	(void) xchg((u32*)(APIC_BASE + reg), v);
 }
-static inline fastcall u32 native_apic_read(unsigned long reg)
+static inline u32 native_apic_read(unsigned long reg)
 {
 	return *((volatile u32 *)(APIC_BASE + reg));
 }
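
The accessor bodies are unchanged; only the redundant annotation goes away.
A minimal stand-alone sketch (hypothetical names, not kernel code) of why this
is ABI-neutral when building with gcc -m32 -mregparm=3: both declarations
below receive 'reg' in %eax and 'v' in %edx.

void annotated_write(unsigned long reg, unsigned int v)
			__attribute__((regparm(3)));	/* what fastcall added */
void plain_write(unsigned long reg, unsigned int v);	/* regparm(3) implied by -mregparm=3 */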

@@ -29,16 +29,16 @@
 extern void (*interrupt[NR_IRQS])(void);
 #ifdef CONFIG_SMP
-fastcall void reschedule_interrupt(void);
-fastcall void invalidate_interrupt(void);
-fastcall void call_function_interrupt(void);
+void reschedule_interrupt(void);
+void invalidate_interrupt(void);
+void call_function_interrupt(void);
 #endif
 #ifdef CONFIG_X86_LOCAL_APIC
-fastcall void apic_timer_interrupt(void);
-fastcall void error_interrupt(void);
-fastcall void spurious_interrupt(void);
-fastcall void thermal_interrupt(void);
+void apic_timer_interrupt(void);
+void error_interrupt(void);
+void spurious_interrupt(void);
+void thermal_interrupt(void);
 #define platform_legacy_irq(irq) ((irq) < 16)
 #endif

@@ -26,7 +26,7 @@ do { \
 	unsigned int dummy; \
 	\
 	typecheck(atomic_t *, count); \
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn); \
+	typecheck_fn(void (*)(atomic_t *), fail_fn); \
 	\
 	__asm__ __volatile__( \
 		LOCK_PREFIX " decl (%%eax) \n" \
@@ -51,8 +51,7 @@ do { \
  * or anything the slow path function returns
  */
 static inline int
-__mutex_fastpath_lock_retval(atomic_t *count,
-				int fastcall (*fail_fn)(atomic_t *))
+__mutex_fastpath_lock_retval(atomic_t *count, int (*fail_fn)(atomic_t *))
 {
 	if (unlikely(atomic_dec_return(count) < 0))
 		return fail_fn(count);
@@ -78,7 +77,7 @@ do { \
 	unsigned int dummy; \
 	\
 	typecheck(atomic_t *, count); \
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn); \
+	typecheck_fn(void (*)(atomic_t *), fail_fn); \
 	\
 	__asm__ __volatile__( \
 		LOCK_PREFIX " incl (%%eax) \n" \
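
In the mutex fastpath macros the slow-path helper is only referenced through
typecheck_fn(), which checks that fail_fn is assignment-compatible with the
named function-pointer type, so the type spelled out here has to stay in sync
with the now fastcall-less declarations of the slow-path functions. The macro
is defined roughly like this (sketch of the linux/kernel.h helper of that era):

#define typecheck_fn(type, function)		\
({	typeof(type) __tmp = function;		\
	(void)__tmp;				\
})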

@@ -83,10 +83,10 @@ static inline void init_MUTEX_LOCKED (struct semaphore *sem)
 	sema_init(sem, 0);
 }
-fastcall void __down_failed(void /* special register calling convention */);
-fastcall int __down_failed_interruptible(void /* params in registers */);
-fastcall int __down_failed_trylock(void /* params in registers */);
-fastcall void __up_wakeup(void /* special register calling convention */);
+void __down_failed(void /* special register calling convention */);
+int __down_failed_interruptible(void /* params in registers */);
+int __down_failed_trylock(void /* params in registers */);
+void __up_wakeup(void /* special register calling convention */);
 /*
  * This is ugly, but we want the default case to fall through.