2009-01-09 12:27:08 -08:00
/*
 * Uniprocessor-only support functions.  The counterpart to kernel/smp.c
 */
2009-01-12 16:04:37 +01:00
# include <linux/interrupt.h>
2009-01-09 12:27:08 -08:00
# include <linux/kernel.h>
2011-05-23 14:51:41 -04:00
# include <linux/export.h>
2009-01-09 12:27:08 -08:00
# include <linux/smp.h>
2016-08-29 08:48:43 +02:00
# include <linux/hypervisor.h>
2009-01-09 12:27:08 -08:00
/*
 * UP variant of smp_call_function_single(): the only valid target is
 * CPU 0, so @func is simply invoked right here.  Interrupts are
 * disabled around the call to mimic the SMP IPI-handler context.
 * @wait is irrelevant — the call is always synchronous on UP.
 * Returns 0 (success) unconditionally.
 */
int smp_call_function_single(int cpu, void (*func)(void *info), void *info,
			     int wait)
{
	unsigned long irqflags;

	/* Any CPU other than 0 cannot exist on a UP kernel. */
	WARN_ON(cpu != 0);

	local_irq_save(irqflags);
	func(info);
	local_irq_restore(irqflags);

	return 0;
}
EXPORT_SYMBOL(smp_call_function_single);
2013-09-11 14:23:24 -07:00
2014-02-24 16:40:02 +01:00
int smp_call_function_single_async ( int cpu , struct call_single_data * csd )
2013-11-14 14:32:08 -08:00
{
unsigned long flags ;
local_irq_save ( flags ) ;
csd - > func ( csd - > info ) ;
local_irq_restore ( flags ) ;
2014-02-24 16:39:57 +01:00
return 0 ;
2013-11-14 14:32:08 -08:00
}
2014-02-24 16:40:02 +01:00
EXPORT_SYMBOL ( smp_call_function_single_async ) ;
2013-11-14 14:32:08 -08:00
2013-09-11 14:23:26 -07:00
/*
 * UP variant of on_each_cpu(): "each CPU" is just the local one, so
 * run @func once here with interrupts off.  @wait is irrelevant.
 * Returns 0 unconditionally.
 */
int on_each_cpu(smp_call_func_t func, void *info, int wait)
{
	unsigned long irqflags;

	local_irq_save(irqflags);
	func(info);
	local_irq_restore(irqflags);

	return 0;
}
EXPORT_SYMBOL(on_each_cpu);
2013-09-11 14:23:24 -07:00
/*
 * Run @func on CPU 0 iff CPU 0 is present in @mask.
 *
 * The mask must be tested even on UP: SMP-aware callers can
 * legitimately hand us a mask that does not include the local
 * (and only) CPU.
 */
void on_each_cpu_mask(const struct cpumask *mask,
		      smp_call_func_t func, void *info, bool wait)
{
	unsigned long irqflags;

	if (!cpumask_test_cpu(0, mask))
		return;

	local_irq_save(irqflags);
	func(info);
	local_irq_restore(irqflags);
}
EXPORT_SYMBOL(on_each_cpu_mask);
/*
 * Run @func on CPU 0 iff @cond_func approves it.
 *
 * Preemption is disabled around the cond_func/func pair so that
 * cond_func runs under the same conditions on UP as it does on SMP.
 * @wait and @gfp_flags are irrelevant on UP.
 */
void on_each_cpu_cond(bool (*cond_func)(int cpu, void *info),
		      smp_call_func_t func, void *info, bool wait,
		      gfp_t gfp_flags)
{
	preempt_disable();
	if (cond_func(0, info)) {
		unsigned long irqflags;

		local_irq_save(irqflags);
		func(info);
		local_irq_restore(irqflags);
	}
	preempt_enable();
}
EXPORT_SYMBOL(on_each_cpu_cond);
2016-08-29 08:48:44 +02:00
/*
 * Run @func(@par) on @cpu and return its result.  On UP only CPU 0
 * exists, so any other @cpu yields -ENXIO.  When @phys is set the
 * hypervisor is asked to pin vCPU 0 to physical CPU 0 for the
 * duration of the call, and unpin (-1) afterwards.
 */
int smp_call_on_cpu(unsigned int cpu, int (*func)(void *), void *par, bool phys)
{
	int ret;

	if (cpu != 0)
		return -ENXIO;

	if (phys)
		hypervisor_pin_vcpu(0);

	ret = func(par);

	if (phys)
		hypervisor_pin_vcpu(-1);

	return ret;
}
EXPORT_SYMBOL_GPL(smp_call_on_cpu);