/*
 * Copyright IBM Corp. 1999,2009
 * Author(s): Denis Joseph Barrow,
 *	      Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Heiko Carstens <heiko.carstens@de.ibm.com>,
 */
#ifndef __ASM_SMP_H
#define __ASM_SMP_H

#ifdef CONFIG_SMP

#include <asm/system.h>
#include <asm/sigp.h>
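
/*
 * Machine restart, halt and power-off for the SMP case; these are
 * expected to bring the other cpus to a stop before changing the
 * machine state.
 */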
extern void machine_restart_smp(char *);
extern void machine_halt_smp(void);
extern void machine_power_off_smp(void);

#define raw_smp_processor_id()	(S390_lowcore.cpu_nr)
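
/* Architecture hooks called by the generic cpu hotplug code. */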
extern int __cpu_disable(void);
extern void __cpu_die(unsigned int cpu);
extern void cpu_die(void) __attribute__((noreturn));
extern int __cpu_up(unsigned int cpu);
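
/*
 * smp_cpu_state_mutex serializes cpu configuration state changes;
 * smp_cpu_polarization[] caches the horizontal/vertical polarization
 * of each cpu as used by the topology code.
 */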
extern struct mutex smp_cpu_state_mutex;
extern int smp_cpu_polarization[];
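
/* IPIs sent on behalf of the generic smp_call_function infrastructure. */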
extern void arch_send_call_function_single_ipi(int cpu);
extern void arch_send_call_function_ipi_mask(const struct cpumask *mask);
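
/* Register save areas of the cpus, collected for zfcpdump. */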
extern struct save_area *zfcpdump_save_areas[NR_CPUS + 1];
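
/*
 * Helpers to transfer control to another cpu, e.g. to continue a
 * restart or dump on the IPL cpu.
 */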
extern void smp_switch_to_ipl_cpu(void (*func)(void *), void *);
extern void smp_switch_to_cpu(void (*)(void *), void *, unsigned long sp,
			      int from, int to);
extern void smp_restart_cpu(void);
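
/*
 * smp_vcpu_scheduled() issues the sigp sense-running order to check
 * whether a (virtual) cpu is currently backed by a physical cpu, so
 * that busy-wait loops can back off instead of spinning on a cpu that
 * is not running.  Illustrative caller sketch (the helpers used here
 * are hypothetical and not declared in this header):
 *
 *	while (!lock_acquired(lp))
 *		if (!smp_vcpu_scheduled(lock_owner(lp)))
 *			yield_to_hypervisor();
 */
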
/*
 * returns 1 if (virtual) cpu is scheduled
 * returns 0 otherwise
 */
static inline int smp_vcpu_scheduled(int cpu)
{
	u32 status;

	switch (sigp_ps(&status, 0, cpu, sigp_sense_running)) {
	case sigp_status_stored:
		/* Check for running status */
		if (status & 0x400)
			return 0;
		break;
	case sigp_not_operational:
		return 0;
	default:
		break;
	}
	return 1;
}

#else /* CONFIG_SMP */

static inline void smp_switch_to_ipl_cpu(void (*func)(void *), void *data)
{
	func(data);
}
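
/* With a single cpu, the (virtual) cpu is always considered scheduled. */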
#define smp_vcpu_scheduled	(1)

#endif /* CONFIG_SMP */
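
/*
 * Re-scan for cpus that became available after boot; without cpu
 * hotplug support there is nothing to find.
 */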
#ifdef CONFIG_HOTPLUG_CPU
extern int smp_rescan_cpus(void);
#else
static inline int smp_rescan_cpus(void) { return 0; }
#endif

#endif /* __ASM_SMP_H */