2009-01-09 23:27:08 +03:00
/*
* Uniprocessor - only support functions . The counterpart to kernel / smp . c
*/
2009-01-12 18:04:37 +03:00
# include <linux/interrupt.h>
2009-01-09 23:27:08 +03:00
# include <linux/kernel.h>
2011-05-23 22:51:41 +04:00
# include <linux/export.h>
2009-01-09 23:27:08 +03:00
# include <linux/smp.h>
2016-08-29 09:48:43 +03:00
# include <linux/hypervisor.h>
2009-01-09 23:27:08 +03:00
/*
 * On UP there is exactly one CPU, so a "cross-call" to @cpu degenerates
 * into a direct invocation of @func on the local CPU.  Interrupts are
 * disabled around the call to mimic the IPI-handler context the SMP
 * implementation would provide.
 */
int smp_call_function_single(int cpu, void (*func)(void *info), void *info,
			     int wait)
{
	unsigned long irqflags;

	/* Any CPU other than 0 cannot exist on a UP kernel. */
	WARN_ON(cpu != 0);

	local_irq_save(irqflags);
	func(info);
	local_irq_restore(irqflags);

	return 0;
}
EXPORT_SYMBOL(smp_call_function_single);
2013-09-12 01:23:24 +04:00
2014-02-24 19:40:02 +04:00
int smp_call_function_single_async ( int cpu , struct call_single_data * csd )
2013-11-15 02:32:08 +04:00
{
unsigned long flags ;
local_irq_save ( flags ) ;
csd - > func ( csd - > info ) ;
local_irq_restore ( flags ) ;
2014-02-24 19:39:57 +04:00
return 0 ;
2013-11-15 02:32:08 +04:00
}
2014-02-24 19:40:02 +04:00
EXPORT_SYMBOL ( smp_call_function_single_async ) ;
2013-11-15 02:32:08 +04:00
2013-09-12 01:23:26 +04:00
/*
 * Run @func on every CPU — on UP that is just CPU 0, invoked directly
 * with interrupts disabled (as the SMP IPI path would do).
 */
int on_each_cpu(smp_call_func_t func, void *info, int wait)
{
	unsigned long irqflags;

	local_irq_save(irqflags);
	func(info);
	local_irq_restore(irqflags);

	return 0;
}
EXPORT_SYMBOL(on_each_cpu);
2013-09-12 01:23:24 +04:00
/*
 * Note we still need to test the mask even for UP
 * because we actually can get an empty mask from
 * code that on SMP might call us without the local
 * CPU in the mask.
 */
void on_each_cpu_mask(const struct cpumask *mask,
		      smp_call_func_t func, void *info, bool wait)
{
	unsigned long irqflags;

	/* Only run @func if CPU 0 (the sole CPU) is in the mask. */
	if (cpumask_test_cpu(0, mask)) {
		local_irq_save(irqflags);
		func(info);
		local_irq_restore(irqflags);
	}
}
EXPORT_SYMBOL(on_each_cpu_mask);
/*
 * Preemption is disabled here to make sure the cond_func is called under the
 * same condtions in UP and SMP.
 */
void on_each_cpu_cond(bool (*cond_func)(int cpu, void *info),
		      smp_call_func_t func, void *info, bool wait,
		      gfp_t gfp_flags)
{
	unsigned long irqflags;

	preempt_disable();

	/* @func runs only if the predicate approves CPU 0. */
	if (cond_func(0, info)) {
		local_irq_save(irqflags);
		func(info);
		local_irq_restore(irqflags);
	}

	preempt_enable();
}
EXPORT_SYMBOL(on_each_cpu_cond);
2016-08-29 09:48:44 +03:00
/*
 * Run @func on @cpu and return its result.  On UP only CPU 0 exists, so
 * any other @cpu yields -ENXIO.  When @phys is set the vCPU is pinned to
 * physical CPU 0 via the hypervisor for the duration of the call and
 * unpinned (-1) afterwards.
 */
int smp_call_on_cpu(unsigned int cpu, int (*func)(void *), void *par, bool phys)
{
	int err;

	if (cpu != 0)
		return -ENXIO;

	if (phys)
		hypervisor_pin_vcpu(0);

	err = func(par);

	if (phys)
		hypervisor_pin_vcpu(-1);

	return err;
}
EXPORT_SYMBOL_GPL(smp_call_on_cpu);