// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2015, 2018, The Linux Foundation. All rights reserved.
 * Copyright (c) 2021, 2023, Qualcomm Innovation Center, Inc. All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/regmap.h>
#include <linux/delay.h>

#include "clk-alpha-pll.h"
#include "common.h"

#define PLL_MODE(p)		((p)->offset + 0x0)
# define PLL_OUTCTRL		BIT(0)
# define PLL_BYPASSNL		BIT(1)
# define PLL_RESET_N		BIT(2)
# define PLL_OFFLINE_REQ	BIT(7)
# define PLL_LOCK_COUNT_SHIFT	8
# define PLL_LOCK_COUNT_MASK	0x3f
# define PLL_BIAS_COUNT_SHIFT	14
# define PLL_BIAS_COUNT_MASK	0x3f
# define PLL_VOTE_FSM_ENA	BIT(20)
# define PLL_FSM_ENA		BIT(20)
# define PLL_VOTE_FSM_RESET	BIT(21)
# define PLL_UPDATE		BIT(22)
# define PLL_UPDATE_BYPASS	BIT(23)
# define PLL_FSM_LEGACY_MODE	BIT(24)
# define PLL_OFFLINE_ACK	BIT(28)
# define ALPHA_PLL_ACK_LATCH	BIT(29)
# define PLL_ACTIVE_FLAG	BIT(30)
# define PLL_LOCK_DET		BIT(31)

#define PLL_L_VAL(p)		((p)->offset + (p)->regs[PLL_OFF_L_VAL])
#define PLL_CAL_L_VAL(p)	((p)->offset + (p)->regs[PLL_OFF_CAL_L_VAL])
#define PLL_ALPHA_VAL(p)	((p)->offset + (p)->regs[PLL_OFF_ALPHA_VAL])
#define PLL_ALPHA_VAL_U(p)	((p)->offset + (p)->regs[PLL_OFF_ALPHA_VAL_U])

#define PLL_USER_CTL(p)		((p)->offset + (p)->regs[PLL_OFF_USER_CTL])
# define PLL_POST_DIV_SHIFT	8
# define PLL_POST_DIV_MASK(p)	GENMASK((p)->width, 0)
# define PLL_ALPHA_EN		BIT(24)
# define PLL_ALPHA_MODE		BIT(25)
# define PLL_VCO_SHIFT		20
# define PLL_VCO_MASK		0x3

#define PLL_USER_CTL_U(p)	((p)->offset + (p)->regs[PLL_OFF_USER_CTL_U])
#define PLL_USER_CTL_U1(p)	((p)->offset + (p)->regs[PLL_OFF_USER_CTL_U1])

#define PLL_CONFIG_CTL(p)	((p)->offset + (p)->regs[PLL_OFF_CONFIG_CTL])
#define PLL_CONFIG_CTL_U(p)	((p)->offset + (p)->regs[PLL_OFF_CONFIG_CTL_U])
#define PLL_CONFIG_CTL_U1(p)	((p)->offset + (p)->regs[PLL_OFF_CONFIG_CTL_U1])
#define PLL_TEST_CTL(p)		((p)->offset + (p)->regs[PLL_OFF_TEST_CTL])
#define PLL_TEST_CTL_U(p)	((p)->offset + (p)->regs[PLL_OFF_TEST_CTL_U])
#define PLL_TEST_CTL_U1(p)	((p)->offset + (p)->regs[PLL_OFF_TEST_CTL_U1])
#define PLL_TEST_CTL_U2(p)	((p)->offset + (p)->regs[PLL_OFF_TEST_CTL_U2])
#define PLL_STATUS(p)		((p)->offset + (p)->regs[PLL_OFF_STATUS])
#define PLL_OPMODE(p)		((p)->offset + (p)->regs[PLL_OFF_OPMODE])
#define PLL_FRAC(p)		((p)->offset + (p)->regs[PLL_OFF_FRAC])

const u8 clk_alpha_pll_regs[][PLL_OFF_MAX_REGS] = {
	[CLK_ALPHA_PLL_TYPE_DEFAULT] =  {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_ALPHA_VAL] = 0x08,
		[PLL_OFF_ALPHA_VAL_U] = 0x0c,
		[PLL_OFF_USER_CTL] = 0x10,
		[PLL_OFF_USER_CTL_U] = 0x14,
		[PLL_OFF_CONFIG_CTL] = 0x18,
		[PLL_OFF_TEST_CTL] = 0x1c,
		[PLL_OFF_TEST_CTL_U] = 0x20,
		[PLL_OFF_STATUS] = 0x24,
	},
	[CLK_ALPHA_PLL_TYPE_HUAYRA] =  {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_ALPHA_VAL] = 0x08,
		[PLL_OFF_USER_CTL] = 0x10,
		[PLL_OFF_CONFIG_CTL] = 0x14,
		[PLL_OFF_CONFIG_CTL_U] = 0x18,
		[PLL_OFF_TEST_CTL] = 0x1c,
		[PLL_OFF_TEST_CTL_U] = 0x20,
		[PLL_OFF_STATUS] = 0x24,
	},
	[CLK_ALPHA_PLL_TYPE_BRAMMO] =  {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_ALPHA_VAL] = 0x08,
		[PLL_OFF_ALPHA_VAL_U] = 0x0c,
		[PLL_OFF_USER_CTL] = 0x10,
		[PLL_OFF_CONFIG_CTL] = 0x18,
		[PLL_OFF_TEST_CTL] = 0x1c,
		[PLL_OFF_STATUS] = 0x24,
	},
	[CLK_ALPHA_PLL_TYPE_FABIA] =  {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_USER_CTL] = 0x0c,
		[PLL_OFF_USER_CTL_U] = 0x10,
		[PLL_OFF_CONFIG_CTL] = 0x14,
		[PLL_OFF_CONFIG_CTL_U] = 0x18,
		[PLL_OFF_TEST_CTL] = 0x1c,
		[PLL_OFF_TEST_CTL_U] = 0x20,
		[PLL_OFF_STATUS] = 0x24,
		[PLL_OFF_OPMODE] = 0x2c,
		[PLL_OFF_FRAC] = 0x38,
	},
	[CLK_ALPHA_PLL_TYPE_TRION] = {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_CAL_L_VAL] = 0x08,
		[PLL_OFF_USER_CTL] = 0x0c,
		[PLL_OFF_USER_CTL_U] = 0x10,
		[PLL_OFF_USER_CTL_U1] = 0x14,
		[PLL_OFF_CONFIG_CTL] = 0x18,
		[PLL_OFF_CONFIG_CTL_U] = 0x1c,
		[PLL_OFF_CONFIG_CTL_U1] = 0x20,
		[PLL_OFF_TEST_CTL] = 0x24,
		[PLL_OFF_TEST_CTL_U] = 0x28,
		[PLL_OFF_TEST_CTL_U1] = 0x2c,
		[PLL_OFF_STATUS] = 0x30,
		[PLL_OFF_OPMODE] = 0x38,
		[PLL_OFF_ALPHA_VAL] = 0x40,
	},
	[CLK_ALPHA_PLL_TYPE_AGERA] =  {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_ALPHA_VAL] = 0x08,
		[PLL_OFF_USER_CTL] = 0x0c,
		[PLL_OFF_CONFIG_CTL] = 0x10,
		[PLL_OFF_CONFIG_CTL_U] = 0x14,
		[PLL_OFF_TEST_CTL] = 0x18,
		[PLL_OFF_TEST_CTL_U] = 0x1c,
		[PLL_OFF_STATUS] = 0x2c,
	},
	[CLK_ALPHA_PLL_TYPE_ZONDA] =  {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_ALPHA_VAL] = 0x08,
		[PLL_OFF_USER_CTL] = 0x0c,
		[PLL_OFF_CONFIG_CTL] = 0x10,
		[PLL_OFF_CONFIG_CTL_U] = 0x14,
		[PLL_OFF_CONFIG_CTL_U1] = 0x18,
		[PLL_OFF_TEST_CTL] = 0x1c,
		[PLL_OFF_TEST_CTL_U] = 0x20,
		[PLL_OFF_TEST_CTL_U1] = 0x24,
		[PLL_OFF_OPMODE] = 0x28,
		[PLL_OFF_STATUS] = 0x38,
	},
	[CLK_ALPHA_PLL_TYPE_LUCID_EVO] = {
		[PLL_OFF_OPMODE] = 0x04,
		[PLL_OFF_STATUS] = 0x0c,
		[PLL_OFF_L_VAL] = 0x10,
		[PLL_OFF_ALPHA_VAL] = 0x14,
		[PLL_OFF_USER_CTL] = 0x18,
		[PLL_OFF_USER_CTL_U] = 0x1c,
		[PLL_OFF_CONFIG_CTL] = 0x20,
		[PLL_OFF_CONFIG_CTL_U] = 0x24,
		[PLL_OFF_CONFIG_CTL_U1] = 0x28,
		[PLL_OFF_TEST_CTL] = 0x2c,
		[PLL_OFF_TEST_CTL_U] = 0x30,
		[PLL_OFF_TEST_CTL_U1] = 0x34,
	},
	[CLK_ALPHA_PLL_TYPE_LUCID_OLE] = {
		[PLL_OFF_OPMODE] = 0x04,
		[PLL_OFF_STATE] = 0x08,
		[PLL_OFF_STATUS] = 0x0c,
		[PLL_OFF_L_VAL] = 0x10,
		[PLL_OFF_ALPHA_VAL] = 0x14,
		[PLL_OFF_USER_CTL] = 0x18,
		[PLL_OFF_USER_CTL_U] = 0x1c,
		[PLL_OFF_CONFIG_CTL] = 0x20,
		[PLL_OFF_CONFIG_CTL_U] = 0x24,
		[PLL_OFF_CONFIG_CTL_U1] = 0x28,
		[PLL_OFF_TEST_CTL] = 0x2c,
		[PLL_OFF_TEST_CTL_U] = 0x30,
		[PLL_OFF_TEST_CTL_U1] = 0x34,
		[PLL_OFF_TEST_CTL_U2] = 0x38,
	},
	[CLK_ALPHA_PLL_TYPE_RIVIAN_EVO] = {
		[PLL_OFF_OPMODE] = 0x04,
		[PLL_OFF_STATUS] = 0x0c,
		[PLL_OFF_L_VAL] = 0x10,
		[PLL_OFF_USER_CTL] = 0x14,
		[PLL_OFF_USER_CTL_U] = 0x18,
		[PLL_OFF_CONFIG_CTL] = 0x1c,
		[PLL_OFF_CONFIG_CTL_U] = 0x20,
		[PLL_OFF_CONFIG_CTL_U1] = 0x24,
		[PLL_OFF_TEST_CTL] = 0x28,
		[PLL_OFF_TEST_CTL_U] = 0x2c,
	},
	[CLK_ALPHA_PLL_TYPE_DEFAULT_EVO] =  {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_ALPHA_VAL] = 0x08,
		[PLL_OFF_ALPHA_VAL_U] = 0x0c,
		[PLL_OFF_TEST_CTL] = 0x10,
		[PLL_OFF_TEST_CTL_U] = 0x14,
		[PLL_OFF_USER_CTL] = 0x18,
		[PLL_OFF_USER_CTL_U] = 0x1c,
		[PLL_OFF_CONFIG_CTL] = 0x20,
		[PLL_OFF_STATUS] = 0x24,
	},
	[CLK_ALPHA_PLL_TYPE_BRAMMO_EVO] =  {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_ALPHA_VAL] = 0x08,
		[PLL_OFF_ALPHA_VAL_U] = 0x0c,
		[PLL_OFF_TEST_CTL] = 0x10,
		[PLL_OFF_TEST_CTL_U] = 0x14,
		[PLL_OFF_USER_CTL] = 0x18,
		[PLL_OFF_CONFIG_CTL] = 0x1C,
		[PLL_OFF_STATUS] = 0x20,
	},
	[CLK_ALPHA_PLL_TYPE_STROMER] = {
		[PLL_OFF_L_VAL] = 0x08,
		[PLL_OFF_ALPHA_VAL] = 0x10,
		[PLL_OFF_ALPHA_VAL_U] = 0x14,
		[PLL_OFF_USER_CTL] = 0x18,
		[PLL_OFF_USER_CTL_U] = 0x1c,
		[PLL_OFF_CONFIG_CTL] = 0x20,
		[PLL_OFF_CONFIG_CTL_U] = 0xff,
		[PLL_OFF_TEST_CTL] = 0x30,
		[PLL_OFF_TEST_CTL_U] = 0x34,
		[PLL_OFF_STATUS] = 0x28,
	},
	[CLK_ALPHA_PLL_TYPE_STROMER_PLUS] = {
		[PLL_OFF_L_VAL] = 0x04,
		[PLL_OFF_USER_CTL] = 0x08,
		[PLL_OFF_USER_CTL_U] = 0x0c,
		[PLL_OFF_CONFIG_CTL] = 0x10,
		[PLL_OFF_TEST_CTL] = 0x14,
		[PLL_OFF_TEST_CTL_U] = 0x18,
		[PLL_OFF_STATUS] = 0x1c,
		[PLL_OFF_ALPHA_VAL] = 0x24,
		[PLL_OFF_ALPHA_VAL_U] = 0x28,
	},
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_regs);

/*
 * Even though 40 bits are present, use only 32 for ease of calculation.
 */
#define ALPHA_REG_BITWIDTH	40
#define ALPHA_REG_16BIT_WIDTH	16
#define ALPHA_BITWIDTH		32U
#define ALPHA_SHIFT(w)		min(w, ALPHA_BITWIDTH)

#define ALPHA_PLL_STATUS_REG_SHIFT	8

#define PLL_HUAYRA_M_WIDTH		8
#define PLL_HUAYRA_M_SHIFT		8
#define PLL_HUAYRA_M_MASK		0xff
#define PLL_HUAYRA_N_SHIFT		0
#define PLL_HUAYRA_N_MASK		0xff
#define PLL_HUAYRA_ALPHA_WIDTH		16

#define PLL_STANDBY		0x0
#define PLL_RUN			0x1
#define PLL_OUT_MASK		0x7
#define PLL_RATE_MARGIN		500

/* TRION PLL specific settings and offsets */
#define TRION_PLL_CAL_VAL	0x44
#define TRION_PCAL_DONE		BIT(26)

/* LUCID PLL specific settings and offsets */
#define LUCID_PCAL_DONE		BIT(27)

/* LUCID 5LPE PLL specific settings and offsets */
#define LUCID_5LPE_PCAL_DONE		BIT(11)
#define LUCID_5LPE_ALPHA_PLL_ACK_LATCH	BIT(13)
#define LUCID_5LPE_PLL_LATCH_INPUT	BIT(14)
#define LUCID_5LPE_ENABLE_VOTE_RUN	BIT(21)

/* LUCID EVO PLL specific settings and offsets */
#define LUCID_EVO_PCAL_NOT_DONE		BIT(8)
#define LUCID_EVO_ENABLE_VOTE_RUN	BIT(25)
#define LUCID_EVO_PLL_L_VAL_MASK	GENMASK(15, 0)
#define LUCID_EVO_PLL_CAL_L_VAL_SHIFT	16
#define LUCID_OLE_PLL_RINGOSC_CAL_L_VAL_SHIFT	24

/* ZONDA PLL specific */
#define ZONDA_PLL_OUT_MASK	0xf
#define ZONDA_STAY_IN_CFA	BIT(16)
#define ZONDA_PLL_FREQ_LOCK_DET	BIT(29)

#define pll_alpha_width(p)					\
		((PLL_ALPHA_VAL_U(p) - PLL_ALPHA_VAL(p) == 4) ?	\
				 ALPHA_REG_BITWIDTH : ALPHA_REG_16BIT_WIDTH)
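
/*
 * For example, with the CLK_ALPHA_PLL_TYPE_DEFAULT offsets above, ALPHA_VAL_U
 * (0x0c) sits 4 bytes after ALPHA_VAL (0x08), so pll_alpha_width() reports the
 * full 40-bit alpha register; types with no ALPHA_VAL_U entry, such as HUAYRA,
 * fall back to the 16-bit width.
 */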

#define pll_has_64bit_config(p)	((PLL_CONFIG_CTL_U(p) - PLL_CONFIG_CTL(p)) == 4)

#define to_clk_alpha_pll(_hw) container_of(to_clk_regmap(_hw), \
					   struct clk_alpha_pll, clkr)

#define to_clk_alpha_pll_postdiv(_hw) container_of(to_clk_regmap(_hw), \
					   struct clk_alpha_pll_postdiv, clkr)

static int wait_for_pll(struct clk_alpha_pll *pll, u32 mask, bool inverse,
			const char *action)
{
	u32 val;
	int count;
	int ret;
	const char *name = clk_hw_get_name(&pll->clkr.hw);

	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (ret)
		return ret;

	for (count = 200; count > 0; count--) {
		ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
		if (ret)
			return ret;
		if (inverse && !(val & mask))
			return 0;
		else if ((val & mask) == mask)
			return 0;

		udelay(1);
	}

	WARN(1, "%s failed to %s!\n", name, action);
	return -ETIMEDOUT;
}

#define wait_for_pll_enable_active(pll) \
	wait_for_pll(pll, PLL_ACTIVE_FLAG, 0, "enable")

#define wait_for_pll_enable_lock(pll) \
	wait_for_pll(pll, PLL_LOCK_DET, 0, "enable")

#define wait_for_zonda_pll_freq_lock(pll) \
	wait_for_pll(pll, ZONDA_PLL_FREQ_LOCK_DET, 0, "freq enable")

#define wait_for_pll_disable(pll) \
	wait_for_pll(pll, PLL_ACTIVE_FLAG, 1, "disable")

#define wait_for_pll_offline(pll) \
	wait_for_pll(pll, PLL_OFFLINE_ACK, 0, "offline")

#define wait_for_pll_update(pll) \
	wait_for_pll(pll, PLL_UPDATE, 1, "update")

#define wait_for_pll_update_ack_set(pll) \
	wait_for_pll(pll, ALPHA_PLL_ACK_LATCH, 0, "update_ack_set")

#define wait_for_pll_update_ack_clear(pll) \
	wait_for_pll(pll, ALPHA_PLL_ACK_LATCH, 1, "update_ack_clear")

static void clk_alpha_pll_write_config(struct regmap *regmap, unsigned int reg,
					unsigned int val)
{
	if (val)
		regmap_write(regmap, reg, val);
}
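
/*
 * Note: clk_alpha_pll_write_config() intentionally skips the write when the
 * configuration value is zero, leaving the register at its existing value.
 */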

void clk_alpha_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
			     const struct alpha_pll_config *config)
{
	u32 val, mask;

	regmap_write(regmap, PLL_L_VAL(pll), config->l);
	regmap_write(regmap, PLL_ALPHA_VAL(pll), config->alpha);
	regmap_write(regmap, PLL_CONFIG_CTL(pll), config->config_ctl_val);

	if (pll_has_64bit_config(pll))
		regmap_write(regmap, PLL_CONFIG_CTL_U(pll),
			     config->config_ctl_hi_val);

	if (pll_alpha_width(pll) > 32)
		regmap_write(regmap, PLL_ALPHA_VAL_U(pll), config->alpha_hi);

	val = config->main_output_mask;
	val |= config->aux_output_mask;
	val |= config->aux2_output_mask;
	val |= config->early_output_mask;
	val |= config->pre_div_val;
	val |= config->post_div_val;
	val |= config->vco_val;
	val |= config->alpha_en_mask;
	val |= config->alpha_mode_mask;

	mask = config->main_output_mask;
	mask |= config->aux_output_mask;
	mask |= config->aux2_output_mask;
	mask |= config->early_output_mask;
	mask |= config->pre_div_mask;
	mask |= config->post_div_mask;
	mask |= config->vco_mask;

	regmap_update_bits(regmap, PLL_USER_CTL(pll), mask, val);

	if (config->test_ctl_mask)
		regmap_update_bits(regmap, PLL_TEST_CTL(pll),
				   config->test_ctl_mask,
				   config->test_ctl_val);
	else
		clk_alpha_pll_write_config(regmap, PLL_TEST_CTL(pll),
					   config->test_ctl_val);

	if (config->test_ctl_hi_mask)
		regmap_update_bits(regmap, PLL_TEST_CTL_U(pll),
				   config->test_ctl_hi_mask,
				   config->test_ctl_hi_val);
	else
		clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U(pll),
					   config->test_ctl_hi_val);

	if (pll->flags & SUPPORTS_FSM_MODE)
		qcom_pll_set_fsm_mode(regmap, PLL_MODE(pll), 6, 0);
}
EXPORT_SYMBOL_GPL(clk_alpha_pll_configure);

static int clk_alpha_pll_hwfsm_enable(struct clk_hw *hw)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val;

	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (ret)
		return ret;

	val |= PLL_FSM_ENA;

	if (pll->flags & SUPPORTS_OFFLINE_REQ)
		val &= ~PLL_OFFLINE_REQ;

	ret = regmap_write(pll->clkr.regmap, PLL_MODE(pll), val);
	if (ret)
		return ret;

	/* Make sure enable request goes through before waiting for update */
	mb();

	return wait_for_pll_enable_active(pll);
}

static void clk_alpha_pll_hwfsm_disable(struct clk_hw *hw)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val;

	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (ret)
		return;

	if (pll->flags & SUPPORTS_OFFLINE_REQ) {
		ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
					 PLL_OFFLINE_REQ, PLL_OFFLINE_REQ);
		if (ret)
			return;

		ret = wait_for_pll_offline(pll);
		if (ret)
			return;
	}

	/* Disable hwfsm */
	ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
				 PLL_FSM_ENA, 0);
	if (ret)
		return;

	wait_for_pll_disable(pll);
}

static int pll_is_enabled(struct clk_hw *hw, u32 mask)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val;

	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (ret)
		return ret;

	return !!(val & mask);
}

static int clk_alpha_pll_hwfsm_is_enabled(struct clk_hw *hw)
{
	return pll_is_enabled(hw, PLL_ACTIVE_FLAG);
}

static int clk_alpha_pll_is_enabled(struct clk_hw *hw)
{
	return pll_is_enabled(hw, PLL_LOCK_DET);
}

static int clk_alpha_pll_enable(struct clk_hw *hw)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val, mask;

	mask = PLL_OUTCTRL | PLL_RESET_N | PLL_BYPASSNL;
	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (ret)
		return ret;

	/* If in FSM mode, just vote for it */
	if (val & PLL_VOTE_FSM_ENA) {
		ret = clk_enable_regmap(hw);
		if (ret)
			return ret;
		return wait_for_pll_enable_active(pll);
	}

	/* Skip if already enabled */
	if ((val & mask) == mask)
		return 0;

	ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
				 PLL_BYPASSNL, PLL_BYPASSNL);
	if (ret)
		return ret;

	/*
	 * H/W requires a 5us delay between disabling the bypass and
	 * de-asserting the reset.
	 */
	mb();
	udelay(5);

	ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
				 PLL_RESET_N, PLL_RESET_N);
	if (ret)
		return ret;

	ret = wait_for_pll_enable_lock(pll);
	if (ret)
		return ret;

	ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
				 PLL_OUTCTRL, PLL_OUTCTRL);

	/* Ensure that the write above goes through before returning. */
	mb();
	return ret;
}

static void clk_alpha_pll_disable(struct clk_hw *hw)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val, mask;

	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (ret)
		return;

	/* If in FSM mode, just unvote it */
	if (val & PLL_VOTE_FSM_ENA) {
		clk_disable_regmap(hw);
		return;
	}

	mask = PLL_OUTCTRL;
	regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), mask, 0);

	/* Delay of 2 output clock ticks required until output is disabled */
	mb();
	udelay(1);

	mask = PLL_RESET_N | PLL_BYPASSNL;
	regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), mask, 0);
}

static unsigned long
alpha_pll_calc_rate(u64 prate, u32 l, u32 a, u32 alpha_width)
{
	return (prate * l) + ((prate * a) >> ALPHA_SHIFT(alpha_width));
}

static unsigned long
alpha_pll_round_rate(unsigned long rate, unsigned long prate, u32 *l, u64 *a,
		     u32 alpha_width)
{
	u64 remainder;
	u64 quotient;

	quotient = rate;
	remainder = do_div(quotient, prate);
	*l = quotient;

	if (!remainder) {
		*a = 0;
		return rate;
	}

	/* Upper ALPHA_BITWIDTH bits of Alpha */
	quotient = remainder << ALPHA_SHIFT(alpha_width);

	remainder = do_div(quotient, prate);

	if (remainder)
		quotient++;

	*a = quotient;

	return alpha_pll_calc_rate(prate, *l, *a, alpha_width);
}
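
/*
 * Worked example (illustrative): for a 19.2 MHz reference and a 1 GHz target
 * with a 40-bit alpha register (ALPHA_SHIFT = 32), alpha_pll_round_rate()
 * computes l = 52 and a = 0x15555556 (roughly 1/3 in 32 fractional bits), and
 * alpha_pll_calc_rate() then reproduces 19.2 MHz * (52 + 1/3) = 1 GHz.
 */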

static const struct pll_vco *
alpha_pll_find_vco(const struct clk_alpha_pll *pll, unsigned long rate)
{
	const struct pll_vco *v = pll->vco_table;
	const struct pll_vco *end = v + pll->num_vco;

	for (; v < end; v++)
		if (rate >= v->min_freq && rate <= v->max_freq)
			return v;

	return NULL;
}

static unsigned long
clk_alpha_pll_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	u32 l, low, high, ctl;
	u64 a = 0, prate = parent_rate;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 alpha_width = pll_alpha_width(pll);

	regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);

	regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);
	if (ctl & PLL_ALPHA_EN) {
		regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL(pll), &low);
		if (alpha_width > 32) {
			regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL_U(pll),
				    &high);
			a = (u64)high << 32 | low;
		} else {
			a = low & GENMASK(alpha_width - 1, 0);
		}

		if (alpha_width > ALPHA_BITWIDTH)
			a >>= alpha_width - ALPHA_BITWIDTH;
	}

	return alpha_pll_calc_rate(prate, l, a, alpha_width);
}

static int __clk_alpha_pll_update_latch(struct clk_alpha_pll *pll)
{
	int ret;
	u32 mode;

	regmap_read(pll->clkr.regmap, PLL_MODE(pll), &mode);

	/* Latch the input to the PLL */
	regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_UPDATE,
			   PLL_UPDATE);

	/* Wait for 2 reference cycles before checking ACK bit */
	udelay(1);

	/*
	 * PLL will latch the new L, Alpha and freq control word.
	 * PLL will respond by raising PLL_ACK_LATCH output when new programming
	 * has been latched in and PLL is being updated. When
	 * UPDATE_LOGIC_BYPASS bit is not set, PLL_UPDATE will be cleared
	 * automatically by hardware when PLL_ACK_LATCH is asserted by PLL.
	 */
	if (mode & PLL_UPDATE_BYPASS) {
		ret = wait_for_pll_update_ack_set(pll);
		if (ret)
			return ret;

		regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_UPDATE, 0);
	} else {
		ret = wait_for_pll_update(pll);
		if (ret)
			return ret;
	}

	ret = wait_for_pll_update_ack_clear(pll);
	if (ret)
		return ret;

	/* Wait for PLL output to stabilize */
	udelay(10);

	return 0;
}

static int clk_alpha_pll_update_latch(struct clk_alpha_pll *pll,
				      int (*is_enabled)(struct clk_hw *))
{
	if (!is_enabled(&pll->clkr.hw) ||
	    !(pll->flags & SUPPORTS_DYNAMIC_UPDATE))
		return 0;

	return __clk_alpha_pll_update_latch(pll);
}
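
/*
 * The latch sequence above is only run when the PLL is currently enabled and
 * carries the SUPPORTS_DYNAMIC_UPDATE flag; otherwise
 * clk_alpha_pll_update_latch() returns immediately and the L/alpha registers
 * written by the caller are simply left in place.
 */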

static int __clk_alpha_pll_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long prate,
				    int (*is_enabled)(struct clk_hw *))
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	const struct pll_vco *vco;
	u32 l, alpha_width = pll_alpha_width(pll);
	u64 a;

	rate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);
	vco = alpha_pll_find_vco(pll, rate);
	if (pll->vco_table && !vco) {
		pr_err("%s: alpha pll not in a valid vco range\n",
		       clk_hw_get_name(hw));
		return -EINVAL;
	}

	regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);

	if (alpha_width > ALPHA_BITWIDTH)
		a <<= alpha_width - ALPHA_BITWIDTH;

	if (alpha_width > 32)
		regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL_U(pll), a >> 32);

	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);

	if (vco) {
		regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
				   PLL_VCO_MASK << PLL_VCO_SHIFT,
				   vco->val << PLL_VCO_SHIFT);
	}

	regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
			   PLL_ALPHA_EN, PLL_ALPHA_EN);

	return clk_alpha_pll_update_latch(pll, is_enabled);
}

static int clk_alpha_pll_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long prate)
{
	return __clk_alpha_pll_set_rate(hw, rate, prate,
					clk_alpha_pll_is_enabled);
}

static int clk_alpha_pll_hwfsm_set_rate(struct clk_hw *hw, unsigned long rate,
					unsigned long prate)
{
	return __clk_alpha_pll_set_rate(hw, rate, prate,
					clk_alpha_pll_hwfsm_is_enabled);
}

static long clk_alpha_pll_round_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long *prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l, alpha_width = pll_alpha_width(pll);
	u64 a;
	unsigned long min_freq, max_freq;

	rate = alpha_pll_round_rate(rate, *prate, &l, &a, alpha_width);
	if (!pll->vco_table || alpha_pll_find_vco(pll, rate))
		return rate;

	min_freq = pll->vco_table[0].min_freq;
	max_freq = pll->vco_table[pll->num_vco - 1].max_freq;

	return clamp(rate, min_freq, max_freq);
}

static unsigned long
alpha_huayra_pll_calc_rate(u64 prate, u32 l, u32 a)
{
	/*
	 * a contains 16 bit alpha_val in two's complement number in the range
	 * of [-0.5, 0.5).
	 */
	if (a >= BIT(PLL_HUAYRA_ALPHA_WIDTH - 1))
		l -= 1;

	return (prate * l) + (prate * a >> PLL_HUAYRA_ALPHA_WIDTH);
}

static unsigned long
alpha_huayra_pll_round_rate(unsigned long rate, unsigned long prate,
			    u32 *l, u32 *a)
{
	u64 remainder;
	u64 quotient;

	quotient = rate;
	remainder = do_div(quotient, prate);
	*l = quotient;

	if (!remainder) {
		*a = 0;
		return rate;
	}

	quotient = remainder << PLL_HUAYRA_ALPHA_WIDTH;
	remainder = do_div(quotient, prate);

	if (remainder)
		quotient++;

	/*
	 * alpha_val should be in two's complement number in the range
	 * of [-0.5, 0.5) so if quotient >= 0.5 then increment the l value
	 * since alpha value will be subtracted in this case.
	 */
	if (quotient >= BIT(PLL_HUAYRA_ALPHA_WIDTH - 1))
		*l += 1;

	*a = quotient;

	return alpha_huayra_pll_calc_rate(prate, *l, *a);
}
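
/*
 * Illustrative example: a 16-bit alpha of 0x8000 is -0.5 in two's complement,
 * so alpha_huayra_pll_calc_rate() drops L by one and then adds
 * prate * 0x8000 >> 16 (i.e. prate / 2), giving prate * (L - 0.5).
 */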

static unsigned long
alpha_pll_huayra_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	u64 rate = parent_rate, tmp;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l, alpha = 0, ctl, alpha_m, alpha_n;

	regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);
	regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);

	if (ctl & PLL_ALPHA_EN) {
		regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL(pll), &alpha);
		/*
		 * Depending upon alpha_mode, it can be treated as M/N value or
		 * as a two's complement number. When alpha_mode=1,
		 * pll_alpha_val<15:8> = M and pll_alpha_val<7:0> = N
		 *
		 *		Fout = FIN * (L + M/N)
		 *
		 * M is a signed number (-128 to 127) and N is unsigned
		 * (0 to 255). M/N has to be within +/-0.5.
		 *
		 * When alpha_mode=0, it is a two's complement number in the
		 * range [-0.5, 0.5).
		 *
		 *		Fout = FIN * (L + (alpha_val) / 2^16)
		 *
		 * where alpha_val is a two's complement number.
		 */
		if (!(ctl & PLL_ALPHA_MODE))
			return alpha_huayra_pll_calc_rate(rate, l, alpha);

		alpha_m = alpha >> PLL_HUAYRA_M_SHIFT & PLL_HUAYRA_M_MASK;
		alpha_n = alpha >> PLL_HUAYRA_N_SHIFT & PLL_HUAYRA_N_MASK;

		rate *= l;
		tmp = parent_rate;
		if (alpha_m >= BIT(PLL_HUAYRA_M_WIDTH - 1)) {
			alpha_m = BIT(PLL_HUAYRA_M_WIDTH) - alpha_m;
			tmp *= alpha_m;
			do_div(tmp, alpha_n);
			rate -= tmp;
		} else {
			tmp *= alpha_m;
			do_div(tmp, alpha_n);
			rate += tmp;
		}

		return rate;
	}

	return alpha_huayra_pll_calc_rate(rate, l, alpha);
}

static int alpha_pll_huayra_set_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l, a, ctl, cur_alpha = 0;

	rate = alpha_huayra_pll_round_rate(rate, prate, &l, &a);

	regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);

	if (ctl & PLL_ALPHA_EN)
		regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL(pll), &cur_alpha);

	/*
	 * Huayra PLL supports PLL dynamic programming. User can change L_VAL,
	 * without having to go through the power on sequence.
	 */
	if (clk_alpha_pll_is_enabled(hw)) {
		if (cur_alpha != a) {
			pr_err("%s: clock needs to be gated\n",
			       clk_hw_get_name(hw));
			return -EBUSY;
		}

		regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
		/* Ensure that the write above goes to detect L val change. */
		mb();
		return wait_for_pll_enable_lock(pll);
	}

	regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);

	if (a == 0)
		regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
				   PLL_ALPHA_EN, 0x0);
	else
		regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
				   PLL_ALPHA_EN | PLL_ALPHA_MODE, PLL_ALPHA_EN);

	return 0;
}

static long alpha_pll_huayra_round_rate(struct clk_hw *hw, unsigned long rate,
					unsigned long *prate)
{
	u32 l, a;

	return alpha_huayra_pll_round_rate(rate, *prate, &l, &a);
}

static int trion_pll_is_enabled(struct clk_alpha_pll *pll,
				struct regmap *regmap)
{
	u32 mode_val, opmode_val;
	int ret;

	ret = regmap_read(regmap, PLL_MODE(pll), &mode_val);
	ret |= regmap_read(regmap, PLL_OPMODE(pll), &opmode_val);
	if (ret)
		return 0;

	return ((opmode_val & PLL_RUN) && (mode_val & PLL_OUTCTRL));
}

static int clk_trion_pll_is_enabled(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);

	return trion_pll_is_enabled(pll, pll->clkr.regmap);
}

static int clk_trion_pll_enable(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct regmap *regmap = pll->clkr.regmap;
	u32 val;
	int ret;

	ret = regmap_read(regmap, PLL_MODE(pll), &val);
	if (ret)
		return ret;

	/* If in FSM mode, just vote for it */
	if (val & PLL_VOTE_FSM_ENA) {
		ret = clk_enable_regmap(hw);
		if (ret)
			return ret;
		return wait_for_pll_enable_active(pll);
	}

	/* Set operation mode to RUN */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_RUN);

	ret = wait_for_pll_enable_lock(pll);
	if (ret)
		return ret;

	/* Enable the PLL outputs */
	ret = regmap_update_bits(regmap, PLL_USER_CTL(pll),
				 PLL_OUT_MASK, PLL_OUT_MASK);
	if (ret)
		return ret;

	/* Enable the global PLL outputs */
	return regmap_update_bits(regmap, PLL_MODE(pll),
				  PLL_OUTCTRL, PLL_OUTCTRL);
}

static void clk_trion_pll_disable(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct regmap *regmap = pll->clkr.regmap;
	u32 val;
	int ret;

	ret = regmap_read(regmap, PLL_MODE(pll), &val);
	if (ret)
		return;

	/* If in FSM mode, just unvote it */
	if (val & PLL_VOTE_FSM_ENA) {
		clk_disable_regmap(hw);
		return;
	}

	/* Disable the global PLL output */
	ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
	if (ret)
		return;

	/* Disable the PLL outputs */
	ret = regmap_update_bits(regmap, PLL_USER_CTL(pll),
				 PLL_OUT_MASK, 0);
	if (ret)
		return;

	/* Place the PLL mode in STANDBY */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
}

static unsigned long
clk_trion_pll_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l, frac, alpha_width = pll_alpha_width(pll);

	regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);
	regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL(pll), &frac);

	return alpha_pll_calc_rate(parent_rate, l, frac, alpha_width);
}

const struct clk_ops clk_alpha_pll_fixed_ops = {
	.enable = clk_alpha_pll_enable,
	.disable = clk_alpha_pll_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.recalc_rate = clk_alpha_pll_recalc_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_fixed_ops);

const struct clk_ops clk_alpha_pll_ops = {
	.enable = clk_alpha_pll_enable,
	.disable = clk_alpha_pll_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.recalc_rate = clk_alpha_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = clk_alpha_pll_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_ops);

const struct clk_ops clk_alpha_pll_huayra_ops = {
	.enable = clk_alpha_pll_enable,
	.disable = clk_alpha_pll_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.recalc_rate = alpha_pll_huayra_recalc_rate,
	.round_rate = alpha_pll_huayra_round_rate,
	.set_rate = alpha_pll_huayra_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_huayra_ops);

const struct clk_ops clk_alpha_pll_hwfsm_ops = {
	.enable = clk_alpha_pll_hwfsm_enable,
	.disable = clk_alpha_pll_hwfsm_disable,
	.is_enabled = clk_alpha_pll_hwfsm_is_enabled,
	.recalc_rate = clk_alpha_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = clk_alpha_pll_hwfsm_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_hwfsm_ops);

const struct clk_ops clk_alpha_pll_fixed_trion_ops = {
	.enable = clk_trion_pll_enable,
	.disable = clk_trion_pll_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = clk_trion_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_fixed_trion_ops);

static unsigned long
clk_alpha_pll_postdiv_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	u32 ctl;

	regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);

	ctl >>= PLL_POST_DIV_SHIFT;
	ctl &= PLL_POST_DIV_MASK(pll);

	return parent_rate >> fls(ctl);
}

static const struct clk_div_table clk_alpha_div_table[] = {
	{ 0x0, 1 },
	{ 0x1, 2 },
	{ 0x3, 4 },
	{ 0x7, 8 },
	{ 0xf, 16 },
	{ }
};

static const struct clk_div_table clk_alpha_2bit_div_table[] = {
	{ 0x0, 1 },
	{ 0x1, 2 },
	{ 0x3, 4 },
	{ }
};

static long
clk_alpha_pll_postdiv_round_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long *prate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	const struct clk_div_table *table;

	if (pll->width == 2)
		table = clk_alpha_2bit_div_table;
	else
		table = clk_alpha_div_table;

	return divider_round_rate(hw, rate, prate, table,
				  pll->width, CLK_DIVIDER_POWER_OF_TWO);
}

static long
clk_alpha_pll_postdiv_round_ro_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long *prate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	u32 ctl, div;

	regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);

	ctl >>= PLL_POST_DIV_SHIFT;
	ctl &= BIT(pll->width) - 1;
	div = 1 << fls(ctl);

	if (clk_hw_get_flags(hw) & CLK_SET_RATE_PARENT)
		*prate = clk_hw_round_rate(clk_hw_get_parent(hw), div * rate);

	return DIV_ROUND_UP_ULL((u64)*prate, div);
}

static int clk_alpha_pll_postdiv_set_rate(struct clk_hw *hw, unsigned long rate,
					  unsigned long parent_rate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	int div;

	/* 16 -> 0xf, 8 -> 0x7, 4 -> 0x3, 2 -> 0x1, 1 -> 0x0 */
	div = DIV_ROUND_UP_ULL(parent_rate, rate) - 1;

	return regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
				  PLL_POST_DIV_MASK(pll) << PLL_POST_DIV_SHIFT,
				  div << PLL_POST_DIV_SHIFT);
}
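
/*
 * Example (illustrative): dividing an 800 MHz parent down to 200 MHz yields
 * div = 4 - 1 = 0x3, matching the /4 entry in clk_alpha_div_table, and on
 * readback clk_alpha_pll_postdiv_recalc_rate() undoes it with
 * parent_rate >> fls(0x3) = parent_rate >> 2.
 */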

const struct clk_ops clk_alpha_pll_postdiv_ops = {
	.recalc_rate = clk_alpha_pll_postdiv_recalc_rate,
	.round_rate = clk_alpha_pll_postdiv_round_rate,
	.set_rate = clk_alpha_pll_postdiv_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_ops);

const struct clk_ops clk_alpha_pll_postdiv_ro_ops = {
	.round_rate = clk_alpha_pll_postdiv_round_ro_rate,
	.recalc_rate = clk_alpha_pll_postdiv_recalc_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_ro_ops);

void clk_fabia_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
			     const struct alpha_pll_config *config)
{
	u32 val, mask;

	clk_alpha_pll_write_config(regmap, PLL_L_VAL(pll), config->l);
	clk_alpha_pll_write_config(regmap, PLL_FRAC(pll), config->alpha);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL(pll),
				   config->config_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U(pll),
				   config->config_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL(pll),
				   config->user_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL_U(pll),
				   config->user_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL(pll),
				   config->test_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U(pll),
				   config->test_ctl_hi_val);

	if (config->post_div_mask) {
		mask = config->post_div_mask;
		val = config->post_div_val;
		regmap_update_bits(regmap, PLL_USER_CTL(pll), mask, val);
	}

	if (pll->flags & SUPPORTS_FSM_LEGACY_MODE)
		regmap_update_bits(regmap, PLL_MODE(pll), PLL_FSM_LEGACY_MODE,
				   PLL_FSM_LEGACY_MODE);

	regmap_update_bits(regmap, PLL_MODE(pll), PLL_UPDATE_BYPASS,
			   PLL_UPDATE_BYPASS);

	regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
}
EXPORT_SYMBOL_GPL(clk_fabia_pll_configure);

static int alpha_pll_fabia_enable(struct clk_hw *hw)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val, opmode_val;
	struct regmap *regmap = pll->clkr.regmap;

	ret = regmap_read(regmap, PLL_MODE(pll), &val);
	if (ret)
		return ret;

	/* If in FSM mode, just vote for it */
	if (val & PLL_VOTE_FSM_ENA) {
		ret = clk_enable_regmap(hw);
		if (ret)
			return ret;
		return wait_for_pll_enable_active(pll);
	}

	ret = regmap_read(regmap, PLL_OPMODE(pll), &opmode_val);
	if (ret)
		return ret;

	/* Skip if PLL is already running */
	if ((opmode_val & PLL_RUN) && (val & PLL_OUTCTRL))
		return 0;

	ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
	if (ret)
		return ret;

	ret = regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
	if (ret)
		return ret;

	ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N,
				 PLL_RESET_N);
	if (ret)
		return ret;

	ret = regmap_write(regmap, PLL_OPMODE(pll), PLL_RUN);
	if (ret)
		return ret;

	ret = wait_for_pll_enable_lock(pll);
	if (ret)
		return ret;

	ret = regmap_update_bits(regmap, PLL_USER_CTL(pll),
				 PLL_OUT_MASK, PLL_OUT_MASK);
	if (ret)
		return ret;

	return regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL,
				  PLL_OUTCTRL);
}

static void alpha_pll_fabia_disable(struct clk_hw *hw)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val;
	struct regmap *regmap = pll->clkr.regmap;

	ret = regmap_read(regmap, PLL_MODE(pll), &val);
	if (ret)
		return;

	/* If in FSM mode, just unvote it */
	if (val & PLL_FSM_ENA) {
		clk_disable_regmap(hw);
		return;
	}

	ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
	if (ret)
		return;

	/* Disable main outputs */
	ret = regmap_update_bits(regmap, PLL_USER_CTL(pll), PLL_OUT_MASK, 0);
	if (ret)
		return;

	/* Place the PLL in STANDBY */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
}

static unsigned long alpha_pll_fabia_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l, frac, alpha_width = pll_alpha_width(pll);

	regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);
	regmap_read(pll->clkr.regmap, PLL_FRAC(pll), &frac);

	return alpha_pll_calc_rate(parent_rate, l, frac, alpha_width);
}

/*
 * Due to limited number of bits for fractional rate programming, the
 * rounded up rate could be marginally higher than the requested rate.
 */
static int alpha_pll_check_rate_margin(struct clk_hw *hw,
				       unsigned long rrate, unsigned long rate)
{
	unsigned long rate_margin = rate + PLL_RATE_MARGIN;

	if (rrate > rate_margin || rrate < rate) {
		pr_err("%s: Rounded rate %lu not within range [%lu, %lu)\n",
		       clk_hw_get_name(hw), rrate, rate, rate_margin);
		return -EINVAL;
	}

	return 0;
}
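
/*
 * With PLL_RATE_MARGIN = 500, the rounded rate is accepted only if it is no
 * more than 500 Hz above the requested rate and never below it.
 */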

static int alpha_pll_fabia_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l, alpha_width = pll_alpha_width(pll);
	unsigned long rrate;
	int ret;
	u64 a;

	rrate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);

	ret = alpha_pll_check_rate_margin(hw, rrate, rate);
	if (ret < 0)
		return ret;

	regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
	regmap_write(pll->clkr.regmap, PLL_FRAC(pll), a);

	return __clk_alpha_pll_update_latch(pll);
}

static int alpha_pll_fabia_prepare(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	const struct pll_vco *vco;
	struct clk_hw *parent_hw;
	unsigned long cal_freq, rrate;
	u32 cal_l, val, alpha_width = pll_alpha_width(pll);
	const char *name = clk_hw_get_name(hw);
	u64 a;
	int ret;

	/* Check if calibration needs to be done i.e. PLL is in reset */
	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (ret)
		return ret;

	/* Return early if calibration is not needed. */
	if (val & PLL_RESET_N)
		return 0;

	vco = alpha_pll_find_vco(pll, clk_hw_get_rate(hw));
	if (!vco) {
		pr_err("%s: alpha pll not in a valid vco range\n", name);
		return -EINVAL;
	}

	cal_freq = DIV_ROUND_CLOSEST((pll->vco_table[0].min_freq +
				      pll->vco_table[0].max_freq) * 54, 100);

	parent_hw = clk_hw_get_parent(hw);
	if (!parent_hw)
		return -EINVAL;

	rrate = alpha_pll_round_rate(cal_freq, clk_hw_get_rate(parent_hw),
				     &cal_l, &a, alpha_width);

	ret = alpha_pll_check_rate_margin(hw, rrate, cal_freq);
	if (ret < 0)
		return ret;

	/* Setup PLL for calibration frequency */
	regmap_write(pll->clkr.regmap, PLL_CAL_L_VAL(pll), cal_l);

	/* Bringup the PLL at calibration frequency */
	ret = clk_alpha_pll_enable(hw);
	if (ret) {
		pr_err("%s: alpha pll calibration failed\n", name);
		return ret;
	}

	clk_alpha_pll_disable(hw);

	return 0;
}

const struct clk_ops clk_alpha_pll_fabia_ops = {
	.prepare = alpha_pll_fabia_prepare,
	.enable = alpha_pll_fabia_enable,
	.disable = alpha_pll_fabia_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.set_rate = alpha_pll_fabia_set_rate,
	.recalc_rate = alpha_pll_fabia_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_fabia_ops);

const struct clk_ops clk_alpha_pll_fixed_fabia_ops = {
	.enable = alpha_pll_fabia_enable,
	.disable = alpha_pll_fabia_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.recalc_rate = alpha_pll_fabia_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_fixed_fabia_ops);

static unsigned long clk_alpha_pll_postdiv_fabia_recalc_rate(struct clk_hw *hw,
							     unsigned long parent_rate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	u32 i, div = 1, val;
	int ret;

	ret = regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &val);
	if (ret)
		return ret;

	val >>= pll->post_div_shift;
	val &= BIT(pll->width) - 1;

	for (i = 0; i < pll->num_post_div; i++) {
		if (pll->post_div_table[i].val == val) {
			div = pll->post_div_table[i].div;
			break;
		}
	}

	return (parent_rate / div);
}

static unsigned long
clk_trion_pll_postdiv_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	struct regmap *regmap = pll->clkr.regmap;
	u32 i, div = 1, val;

	regmap_read(regmap, PLL_USER_CTL(pll), &val);

	val >>= pll->post_div_shift;
	val &= PLL_POST_DIV_MASK(pll);

	for (i = 0; i < pll->num_post_div; i++) {
		if (pll->post_div_table[i].val == val) {
			div = pll->post_div_table[i].div;
			break;
		}
	}

	return (parent_rate / div);
}

static long
clk_trion_pll_postdiv_round_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long *prate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);

	return divider_round_rate(hw, rate, prate, pll->post_div_table,
				  pll->width, CLK_DIVIDER_ROUND_CLOSEST);
}

static int
clk_trion_pll_postdiv_set_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long parent_rate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	struct regmap *regmap = pll->clkr.regmap;
	int i, val = 0, div;

	div = DIV_ROUND_UP_ULL(parent_rate, rate);
	for (i = 0; i < pll->num_post_div; i++) {
		if (pll->post_div_table[i].div == div) {
			val = pll->post_div_table[i].val;
			break;
		}
	}

	return regmap_update_bits(regmap, PLL_USER_CTL(pll),
				  PLL_POST_DIV_MASK(pll) << PLL_POST_DIV_SHIFT,
				  val << PLL_POST_DIV_SHIFT);
}

const struct clk_ops clk_alpha_pll_postdiv_trion_ops = {
	.recalc_rate = clk_trion_pll_postdiv_recalc_rate,
	.round_rate = clk_trion_pll_postdiv_round_rate,
	.set_rate = clk_trion_pll_postdiv_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_trion_ops);

static long clk_alpha_pll_postdiv_fabia_round_rate(struct clk_hw *hw,
						   unsigned long rate, unsigned long *prate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);

	return divider_round_rate(hw, rate, prate, pll->post_div_table,
				  pll->width, CLK_DIVIDER_ROUND_CLOSEST);
}

static int clk_alpha_pll_postdiv_fabia_set_rate(struct clk_hw *hw,
						unsigned long rate, unsigned long parent_rate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	int i, val = 0, div, ret;

	/*
	 * If the PLL is in FSM mode, then treat set_rate callback as a
	 * no-operation.
	 */
	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (ret)
		return ret;

	if (val & PLL_VOTE_FSM_ENA)
		return 0;

	div = DIV_ROUND_UP_ULL(parent_rate, rate);
	for (i = 0; i < pll->num_post_div; i++) {
		if (pll->post_div_table[i].div == div) {
			val = pll->post_div_table[i].val;
			break;
		}
	}

	return regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
				  (BIT(pll->width) - 1) << pll->post_div_shift,
				  val << pll->post_div_shift);
}

const struct clk_ops clk_alpha_pll_postdiv_fabia_ops = {
	.recalc_rate = clk_alpha_pll_postdiv_fabia_recalc_rate,
	.round_rate = clk_alpha_pll_postdiv_fabia_round_rate,
	.set_rate = clk_alpha_pll_postdiv_fabia_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_fabia_ops);

/**
 * clk_trion_pll_configure - configure the trion pll
 *
 * @pll: clk alpha pll
 * @regmap: register map
 * @config: configuration to apply for pll
 */
void clk_trion_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
			     const struct alpha_pll_config *config)
{
	/*
	 * If the bootloader left the PLL enabled it's likely that there are
	 * RCGs that will lock up if we disable the PLL below.
	 */
	if (trion_pll_is_enabled(pll, regmap)) {
		pr_debug("Trion PLL is already enabled, skipping configuration\n");
		return;
	}

	clk_alpha_pll_write_config(regmap, PLL_L_VAL(pll), config->l);
	regmap_write(regmap, PLL_CAL_L_VAL(pll), TRION_PLL_CAL_VAL);
	clk_alpha_pll_write_config(regmap, PLL_ALPHA_VAL(pll), config->alpha);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL(pll),
				   config->config_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U(pll),
				   config->config_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U1(pll),
				   config->config_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL(pll),
				   config->user_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL_U(pll),
				   config->user_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL_U1(pll),
				   config->user_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL(pll),
				   config->test_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U(pll),
				   config->test_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U1(pll),
				   config->test_ctl_hi1_val);

	regmap_update_bits(regmap, PLL_MODE(pll), PLL_UPDATE_BYPASS,
			   PLL_UPDATE_BYPASS);

	/* Disable PLL output */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);

	/* Set operation mode to OFF */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);

	/* Place the PLL in STANDBY mode */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
}
EXPORT_SYMBOL_GPL(clk_trion_pll_configure);

/*
 * The TRION PLL requires a power-on self-calibration which happens when the
 * PLL comes out of reset. Calibrate in case it is not completed.
 */
static int __alpha_pll_trion_prepare(struct clk_hw *hw, u32 pcal_done)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val;
	int ret;

	/* Return early if calibration is not needed. */
	regmap_read(pll->clkr.regmap, PLL_STATUS(pll), &val);
	if (val & pcal_done)
		return 0;

	/* On/off to calibrate */
	ret = clk_trion_pll_enable(hw);
	if (!ret)
		clk_trion_pll_disable(hw);

	return ret;
}

static int alpha_pll_trion_prepare(struct clk_hw *hw)
{
	return __alpha_pll_trion_prepare(hw, TRION_PCAL_DONE);
}

static int alpha_pll_lucid_prepare(struct clk_hw *hw)
{
	return __alpha_pll_trion_prepare(hw, LUCID_PCAL_DONE);
}
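
/*
 * Trion and Lucid share the same calibration flow; they differ only in which
 * PLL_STATUS bit reports that the power-on calibration has completed
 * (TRION_PCAL_DONE vs LUCID_PCAL_DONE).
 */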

static int __alpha_pll_trion_set_rate(struct clk_hw *hw, unsigned long rate,
				      unsigned long prate, u32 latch_bit, u32 latch_ack)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	unsigned long rrate;
	u32 val, l, alpha_width = pll_alpha_width(pll);
	u64 a;
	int ret;

	rrate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);

	ret = alpha_pll_check_rate_margin(hw, rrate, rate);
	if (ret < 0)
		return ret;

	regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);

	/* Latch the PLL input */
	ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), latch_bit, latch_bit);
	if (ret)
		return ret;

	/* Wait for 2 reference cycles before checking the ACK bit. */
	udelay(1);
	regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (!(val & latch_ack)) {
		pr_err("Lucid PLL latch failed. Output may be unstable!\n");
		return -EINVAL;
	}

	/* Return the latch input to 0 */
	ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), latch_bit, 0);
	if (ret)
		return ret;

	if (clk_hw_is_enabled(hw)) {
		ret = wait_for_pll_enable_lock(pll);
		if (ret)
			return ret;
	}

	/* Wait for PLL output to stabilize */
	udelay(100);
	return 0;
}

static int alpha_pll_trion_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long prate)
{
	return __alpha_pll_trion_set_rate(hw, rate, prate, PLL_UPDATE, ALPHA_PLL_ACK_LATCH);
}
const struct clk_ops clk_alpha_pll_trion_ops = {
	.prepare = alpha_pll_trion_prepare,
	.enable = clk_trion_pll_enable,
	.disable = clk_trion_pll_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = clk_trion_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = alpha_pll_trion_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_trion_ops);

const struct clk_ops clk_alpha_pll_lucid_ops = {
	.prepare = alpha_pll_lucid_prepare,
	.enable = clk_trion_pll_enable,
	.disable = clk_trion_pll_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = clk_trion_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = alpha_pll_trion_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_lucid_ops);

const struct clk_ops clk_alpha_pll_postdiv_lucid_ops = {
	.recalc_rate = clk_alpha_pll_postdiv_fabia_recalc_rate,
	.round_rate = clk_alpha_pll_postdiv_fabia_round_rate,
	.set_rate = clk_alpha_pll_postdiv_fabia_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_lucid_ops);

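/*
 * Agera PLLs are configured by programming the initial L/ALPHA values and the
 * user, config and test control registers; no mode-register sequencing is
 * required at configure time.
 */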
void clk_agera_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
			     const struct alpha_pll_config *config)
{
	clk_alpha_pll_write_config(regmap, PLL_L_VAL(pll), config->l);
	clk_alpha_pll_write_config(regmap, PLL_ALPHA_VAL(pll), config->alpha);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL(pll),
				   config->user_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL(pll),
				   config->config_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U(pll),
				   config->config_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL(pll),
				   config->test_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U(pll),
				   config->test_ctl_hi_val);
}
EXPORT_SYMBOL_GPL(clk_agera_pll_configure);

static int clk_alpha_pll_agera_set_rate(struct clk_hw *hw, unsigned long rate,
					unsigned long prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l, alpha_width = pll_alpha_width(pll);
	int ret;
	unsigned long rrate;
	u64 a;

	rrate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);
	ret = alpha_pll_check_rate_margin(hw, rrate, rate);
	if (ret < 0)
		return ret;

	/* change L_VAL without having to go through the power on sequence */
	regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);

	if (clk_hw_is_enabled(hw))
		return wait_for_pll_enable_lock(pll);

	return 0;
}

const struct clk_ops clk_alpha_pll_agera_ops = {
	.enable = clk_alpha_pll_enable,
	.disable = clk_alpha_pll_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.recalc_rate = alpha_pll_fabia_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = clk_alpha_pll_agera_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_agera_ops);

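/*
 * Lucid 5LPE enable path: when the PLL is under FSM (hardware vote) control
 * only a vote is cast; otherwise the PLL is brought out of reset, put in RUN
 * mode, and its per-branch and global outputs are enabled once lock is
 * achieved.
 */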
static int alpha_pll_lucid_5lpe_enable(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val;
	int ret;

	ret = regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &val);
	if (ret)
		return ret;

	/* If in FSM mode, just vote for it */
	if (val & LUCID_5LPE_ENABLE_VOTE_RUN) {
		ret = clk_enable_regmap(hw);
		if (ret)
			return ret;
		return wait_for_pll_enable_lock(pll);
	}

	/* Check if PLL is already enabled, return if enabled */
	ret = trion_pll_is_enabled(pll, pll->clkr.regmap);
	if (ret < 0)
		return ret;

	ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
	if (ret)
		return ret;

	regmap_write(pll->clkr.regmap, PLL_OPMODE(pll), PLL_RUN);

	ret = wait_for_pll_enable_lock(pll);
	if (ret)
		return ret;

	/* Enable the PLL outputs */
	ret = regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll), PLL_OUT_MASK, PLL_OUT_MASK);
	if (ret)
		return ret;

	/* Enable the global PLL outputs */
	return regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_OUTCTRL, PLL_OUTCTRL);
}

static void alpha_pll_lucid_5lpe_disable(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val;
	int ret;

	ret = regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &val);
	if (ret)
		return;

	/* If in FSM mode, just unvote it */
	if (val & LUCID_5LPE_ENABLE_VOTE_RUN) {
		clk_disable_regmap(hw);
		return;
	}

	/* Disable the global PLL output */
	ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
	if (ret)
		return;

	/* Disable the PLL outputs */
	ret = regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll), PLL_OUT_MASK, 0);
	if (ret)
		return;

	/* Place the PLL mode in STANDBY */
	regmap_write(pll->clkr.regmap, PLL_OPMODE(pll), PLL_STANDBY);
}

/*
 * The Lucid 5LPE PLL requires a power-on self-calibration which happens
 * when the PLL comes out of reset. Calibrate in case it is not completed.
 */
static int alpha_pll_lucid_5lpe_prepare(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct clk_hw *p;
	u32 val = 0;
	int ret;

	/* Return early if calibration is not needed. */
	regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (val & LUCID_5LPE_PCAL_DONE)
		return 0;

	p = clk_hw_get_parent(hw);
	if (!p)
		return -EINVAL;

	ret = alpha_pll_lucid_5lpe_enable(hw);
	if (ret)
		return ret;

	alpha_pll_lucid_5lpe_disable(hw);

	return 0;
}

static int alpha_pll_lucid_5lpe_set_rate(struct clk_hw *hw, unsigned long rate,
					 unsigned long prate)
{
	return __alpha_pll_trion_set_rate(hw, rate, prate,
					  LUCID_5LPE_PLL_LATCH_INPUT,
					  LUCID_5LPE_ALPHA_PLL_ACK_LATCH);
}

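/*
 * Setting the post-divider is a no-op while the PLL is under FSM (hardware
 * vote) control; otherwise the divider value matching the requested rate is
 * looked up in post_div_table and programmed into the USER_CTL register.
 */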
static int __clk_lucid_pll_postdiv_set_rate(struct clk_hw *hw, unsigned long rate,
					    unsigned long parent_rate,
					    unsigned long enable_vote_run)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	struct regmap *regmap = pll->clkr.regmap;
	int i, val, div, ret;
	u32 mask;

	/*
	 * If the PLL is in FSM mode, then treat set_rate callback as a
	 * no-operation.
	 */
	ret = regmap_read(regmap, PLL_USER_CTL(pll), &val);
	if (ret)
		return ret;

	if (val & enable_vote_run)
		return 0;

	if (!pll->post_div_table) {
		pr_err("Missing the post_div_table for the %s PLL\n",
		       clk_hw_get_name(&pll->clkr.hw));
		return -EINVAL;
	}

	div = DIV_ROUND_UP_ULL((u64)parent_rate, rate);
	for (i = 0; i < pll->num_post_div; i++) {
		if (pll->post_div_table[i].div == div) {
			val = pll->post_div_table[i].val;
			break;
		}
	}

	mask = GENMASK(pll->width + pll->post_div_shift - 1, pll->post_div_shift);
	return regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
				  mask, val << pll->post_div_shift);
}

static int clk_lucid_5lpe_pll_postdiv_set_rate(struct clk_hw *hw, unsigned long rate,
					       unsigned long parent_rate)
{
	return __clk_lucid_pll_postdiv_set_rate(hw, rate, parent_rate, LUCID_5LPE_ENABLE_VOTE_RUN);
}

const struct clk_ops clk_alpha_pll_lucid_5lpe_ops = {
	.prepare = alpha_pll_lucid_5lpe_prepare,
	.enable = alpha_pll_lucid_5lpe_enable,
	.disable = alpha_pll_lucid_5lpe_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = clk_trion_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = alpha_pll_lucid_5lpe_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_lucid_5lpe_ops);

const struct clk_ops clk_alpha_pll_fixed_lucid_5lpe_ops = {
	.enable = alpha_pll_lucid_5lpe_enable,
	.disable = alpha_pll_lucid_5lpe_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = clk_trion_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_fixed_lucid_5lpe_ops);

const struct clk_ops clk_alpha_pll_postdiv_lucid_5lpe_ops = {
	.recalc_rate = clk_alpha_pll_postdiv_fabia_recalc_rate,
	.round_rate = clk_alpha_pll_postdiv_fabia_round_rate,
	.set_rate = clk_lucid_5lpe_pll_postdiv_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_lucid_5lpe_ops);

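/*
 * Program a Zonda PLL's configuration registers and leave it in a known
 * state: bypass asserted, output disabled, operation mode STANDBY and reset
 * de-asserted.
 */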
void clk_zonda_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
			     const struct alpha_pll_config *config)
{
	clk_alpha_pll_write_config(regmap, PLL_L_VAL(pll), config->l);
	clk_alpha_pll_write_config(regmap, PLL_ALPHA_VAL(pll), config->alpha);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL(pll), config->config_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U(pll), config->config_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U1(pll), config->config_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL(pll), config->user_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL_U(pll), config->user_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL_U1(pll), config->user_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL(pll), config->test_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U(pll), config->test_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U1(pll), config->test_ctl_hi1_val);

	regmap_update_bits(regmap, PLL_MODE(pll), PLL_BYPASSNL, 0);

	/* Disable PLL output */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);

	/* Set operation mode to OFF */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);

	/* Place the PLL in STANDBY mode */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
}
EXPORT_SYMBOL_GPL(clk_zonda_pll_configure);

static int clk_zonda_pll_enable(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct regmap *regmap = pll->clkr.regmap;
	u32 val;
	int ret;

	regmap_read(regmap, PLL_MODE(pll), &val);

	/* If in FSM mode, just vote for it */
	if (val & PLL_VOTE_FSM_ENA) {
		ret = clk_enable_regmap(hw);
		if (ret)
			return ret;
		return wait_for_pll_enable_active(pll);
	}

	/* Get the PLL out of bypass mode */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_BYPASSNL, PLL_BYPASSNL);

	/*
	 * H/W requires a 1us delay between disabling the bypass and
	 * de-asserting the reset.
	 */
	udelay(1);
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);

	/* Set operation mode to RUN */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_RUN);

	regmap_read(regmap, PLL_TEST_CTL(pll), &val);

	/* If cfa mode then poll for freq lock */
	if (val & ZONDA_STAY_IN_CFA)
		ret = wait_for_zonda_pll_freq_lock(pll);
	else
		ret = wait_for_pll_enable_lock(pll);
	if (ret)
		return ret;

	/* Enable the PLL outputs */
	regmap_update_bits(regmap, PLL_USER_CTL(pll), ZONDA_PLL_OUT_MASK, ZONDA_PLL_OUT_MASK);

	/* Enable the global PLL outputs */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, PLL_OUTCTRL);

	return 0;
}

static void clk_zonda_pll_disable(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct regmap *regmap = pll->clkr.regmap;
	u32 val;

	regmap_read(regmap, PLL_MODE(pll), &val);

	/* If in FSM mode, just unvote it */
	if (val & PLL_VOTE_FSM_ENA) {
		clk_disable_regmap(hw);
		return;
	}

	/* Disable the global PLL output */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);

	/* Disable the PLL outputs */
	regmap_update_bits(regmap, PLL_USER_CTL(pll), ZONDA_PLL_OUT_MASK, 0);

	/* Put the PLL in bypass and reset */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N | PLL_BYPASSNL, 0);

	/* Place the PLL mode in OFF state */
	regmap_write(regmap, PLL_OPMODE(pll), 0x0);
}

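/*
 * Zonda supports dynamic rate updates: the new L/ALPHA values are written
 * while the PLL is running, and lock is polled either through the frequency
 * lock detector (when the PLL is held in CFA mode via TEST_CTL) or through
 * the regular lock-detect bit.
 */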
static int clk_zonda_pll_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	unsigned long rrate;
	u32 test_ctl_val;
	u32 l, alpha_width = pll_alpha_width(pll);
	u64 a;
	int ret;

	rrate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);

	ret = alpha_pll_check_rate_margin(hw, rrate, rate);
	if (ret < 0)
		return ret;

	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);
	regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);

	/* Wait before polling for the frequency latch */
	udelay(5);

	/* Read stay in cfa mode */
	regmap_read(pll->clkr.regmap, PLL_TEST_CTL(pll), &test_ctl_val);

	/* If cfa mode then poll for freq lock */
	if (test_ctl_val & ZONDA_STAY_IN_CFA)
		ret = wait_for_zonda_pll_freq_lock(pll);
	else
		ret = wait_for_pll_enable_lock(pll);
	if (ret)
		return ret;

	/* Wait for PLL output to stabilize */
	udelay(100);
	return 0;
}

const struct clk_ops clk_alpha_pll_zonda_ops = {
	.enable = clk_zonda_pll_enable,
	.disable = clk_zonda_pll_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = clk_trion_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = clk_zonda_pll_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_zonda_ops);

void clk_lucid_evo_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
				 const struct alpha_pll_config *config)
{
	u32 lval = config->l;

	lval |= TRION_PLL_CAL_VAL << LUCID_EVO_PLL_CAL_L_VAL_SHIFT;
	clk_alpha_pll_write_config(regmap, PLL_L_VAL(pll), lval);
	clk_alpha_pll_write_config(regmap, PLL_ALPHA_VAL(pll), config->alpha);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL(pll), config->config_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U(pll), config->config_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U1(pll), config->config_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL(pll), config->user_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL_U(pll), config->user_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL(pll), config->test_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U(pll), config->test_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U1(pll), config->test_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U2(pll), config->test_ctl_hi2_val);

	/* Disable PLL output */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);

	/* Set operation mode to STANDBY and de-assert the reset */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
}
EXPORT_SYMBOL_GPL(clk_lucid_evo_pll_configure);

void clk_lucid_ole_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
				 const struct alpha_pll_config *config)
{
	u32 lval = config->l;

	lval |= TRION_PLL_CAL_VAL << LUCID_EVO_PLL_CAL_L_VAL_SHIFT;
	lval |= TRION_PLL_CAL_VAL << LUCID_OLE_PLL_RINGOSC_CAL_L_VAL_SHIFT;
	clk_alpha_pll_write_config(regmap, PLL_L_VAL(pll), lval);
	clk_alpha_pll_write_config(regmap, PLL_ALPHA_VAL(pll), config->alpha);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL(pll), config->config_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U(pll), config->config_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U1(pll), config->config_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL(pll), config->user_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL_U(pll), config->user_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL(pll), config->test_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U(pll), config->test_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U1(pll), config->test_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U2(pll), config->test_ctl_hi2_val);

	/* Disable PLL output */
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);

	/* Set operation mode to STANDBY and de-assert the reset */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
	regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
}
EXPORT_SYMBOL_GPL(clk_lucid_ole_pll_configure);

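/*
 * Lucid EVO enable path: vote only when the PLL is FSM controlled; otherwise
 * bring the PLL out of reset, set the operation mode to RUN, wait for lock
 * and then enable the per-branch and global outputs.
 */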
static int alpha_pll_lucid_evo_enable(struct clk_hw *hw)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct regmap *regmap = pll->clkr.regmap;
	u32 val;
	int ret;

	ret = regmap_read(regmap, PLL_USER_CTL(pll), &val);
	if (ret)
		return ret;

	/* If in FSM mode, just vote for it */
	if (val & LUCID_EVO_ENABLE_VOTE_RUN) {
		ret = clk_enable_regmap(hw);
		if (ret)
			return ret;
		return wait_for_pll_enable_lock(pll);
	}

	/* Check if PLL is already enabled */
	ret = trion_pll_is_enabled(pll, regmap);
	if (ret < 0) {
		return ret;
	} else if (ret) {
		pr_warn("%s PLL is already enabled\n", clk_hw_get_name(&pll->clkr.hw));
		return 0;
	}

	ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
	if (ret)
		return ret;

	/* Set operation mode to RUN */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_RUN);

	ret = wait_for_pll_enable_lock(pll);
	if (ret)
		return ret;

	/* Enable the PLL outputs */
	ret = regmap_update_bits(regmap, PLL_USER_CTL(pll), PLL_OUT_MASK, PLL_OUT_MASK);
	if (ret)
		return ret;

	/* Enable the global PLL outputs */
	ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, PLL_OUTCTRL);
	if (ret)
		return ret;

	/* Ensure that the write above goes through before returning. */
	mb();
	return ret;
}

static void _alpha_pll_lucid_evo_disable(struct clk_hw *hw, bool reset)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct regmap *regmap = pll->clkr.regmap;
	u32 val;
	int ret;

	ret = regmap_read(regmap, PLL_USER_CTL(pll), &val);
	if (ret)
		return;

	/* If in FSM mode, just unvote it */
	if (val & LUCID_EVO_ENABLE_VOTE_RUN) {
		clk_disable_regmap(hw);
		return;
	}

	/* Disable the global PLL output */
	ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
	if (ret)
		return;

	/* Disable the PLL outputs */
	ret = regmap_update_bits(regmap, PLL_USER_CTL(pll), PLL_OUT_MASK, 0);
	if (ret)
		return;

	/* Place the PLL mode in STANDBY */
	regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);

	if (reset)
		regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, 0);
}

static int _alpha_pll_lucid_evo_prepare(struct clk_hw *hw, bool reset)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct clk_hw *p;
	u32 val = 0;
	int ret;

	/* Return early if calibration is not needed. */
	regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
	if (!(val & LUCID_EVO_PCAL_NOT_DONE))
		return 0;

	p = clk_hw_get_parent(hw);
	if (!p)
		return -EINVAL;

	ret = alpha_pll_lucid_evo_enable(hw);
	if (ret)
		return ret;

	_alpha_pll_lucid_evo_disable(hw, reset);

	return 0;
}

static void alpha_pll_lucid_evo_disable(struct clk_hw *hw)
{
	_alpha_pll_lucid_evo_disable(hw, false);
}

static int alpha_pll_lucid_evo_prepare(struct clk_hw *hw)
{
	return _alpha_pll_lucid_evo_prepare(hw, false);
}

static void alpha_pll_reset_lucid_evo_disable(struct clk_hw *hw)
{
	_alpha_pll_lucid_evo_disable(hw, true);
}

static int alpha_pll_reset_lucid_evo_prepare(struct clk_hw *hw)
{
	return _alpha_pll_lucid_evo_prepare(hw, true);
}

static unsigned long alpha_pll_lucid_evo_recalc_rate(struct clk_hw *hw,
						     unsigned long parent_rate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	struct regmap *regmap = pll->clkr.regmap;
	u32 l, frac;

	regmap_read(regmap, PLL_L_VAL(pll), &l);
	l &= LUCID_EVO_PLL_L_VAL_MASK;
	regmap_read(regmap, PLL_ALPHA_VAL(pll), &frac);

	return alpha_pll_calc_rate(parent_rate, l, frac, pll_alpha_width(pll));
}

static int clk_lucid_evo_pll_postdiv_set_rate(struct clk_hw *hw, unsigned long rate,
					      unsigned long parent_rate)
{
	return __clk_lucid_pll_postdiv_set_rate(hw, rate, parent_rate, LUCID_EVO_ENABLE_VOTE_RUN);
}

const struct clk_ops clk_alpha_pll_fixed_lucid_evo_ops = {
	.enable = alpha_pll_lucid_evo_enable,
	.disable = alpha_pll_lucid_evo_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = alpha_pll_lucid_evo_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_fixed_lucid_evo_ops);

const struct clk_ops clk_alpha_pll_postdiv_lucid_evo_ops = {
	.recalc_rate = clk_alpha_pll_postdiv_fabia_recalc_rate,
	.round_rate = clk_alpha_pll_postdiv_fabia_round_rate,
	.set_rate = clk_lucid_evo_pll_postdiv_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_lucid_evo_ops);

const struct clk_ops clk_alpha_pll_lucid_evo_ops = {
	.prepare = alpha_pll_lucid_evo_prepare,
	.enable = alpha_pll_lucid_evo_enable,
	.disable = alpha_pll_lucid_evo_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = alpha_pll_lucid_evo_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = alpha_pll_lucid_5lpe_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_lucid_evo_ops);

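/*
 * Illustrative only: a SoC clock controller driver typically instantiates one
 * of these PLLs along the following lines, matching the register layout to
 * the ops exported above (names such as "gpll0" and "bi_tcxo" are
 * hypothetical placeholders):
 *
 *	static struct clk_alpha_pll gpll0 = {
 *		.offset = 0x0,
 *		.regs = clk_alpha_pll_regs[CLK_ALPHA_PLL_TYPE_LUCID_EVO],
 *		.clkr.hw.init = &(const struct clk_init_data) {
 *			.name = "gpll0",
 *			.parent_data = &(const struct clk_parent_data) {
 *				.fw_name = "bi_tcxo",
 *			},
 *			.num_parents = 1,
 *			.ops = &clk_alpha_pll_fixed_lucid_evo_ops,
 *		},
 *	};
 */
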
const struct clk_ops clk_alpha_pll_reset_lucid_evo_ops = {
	.prepare = alpha_pll_reset_lucid_evo_prepare,
	.enable = alpha_pll_lucid_evo_enable,
	.disable = alpha_pll_reset_lucid_evo_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = alpha_pll_lucid_evo_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = alpha_pll_lucid_5lpe_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_reset_lucid_evo_ops);

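/*
 * Rivian EVO PLLs run at an integer multiple of the parent: only the L value
 * is programmed (there is no fractional ALPHA write), and at configure time
 * the PLL is taken out of bypass and reset with its global output left
 * disabled.
 */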
void clk_rivian_evo_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
				  const struct alpha_pll_config *config)
{
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL(pll), config->config_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U(pll), config->config_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_CONFIG_CTL_U1(pll), config->config_ctl_hi1_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL(pll), config->test_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_TEST_CTL_U(pll), config->test_ctl_hi_val);
	clk_alpha_pll_write_config(regmap, PLL_L_VAL(pll), config->l);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL(pll), config->user_ctl_val);
	clk_alpha_pll_write_config(regmap, PLL_USER_CTL_U(pll), config->user_ctl_hi_val);

	regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);

	regmap_update_bits(regmap, PLL_MODE(pll),
			   PLL_RESET_N | PLL_BYPASSNL | PLL_OUTCTRL,
			   PLL_RESET_N | PLL_BYPASSNL);
}
EXPORT_SYMBOL_GPL(clk_rivian_evo_pll_configure);

static unsigned long clk_rivian_evo_pll_recalc_rate(struct clk_hw *hw,
						    unsigned long parent_rate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l;

	regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);

	return parent_rate * l;
}

static long clk_rivian_evo_pll_round_rate(struct clk_hw *hw, unsigned long rate,
					  unsigned long *prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	unsigned long min_freq, max_freq;
	u32 l;
	u64 a;

	rate = alpha_pll_round_rate(rate, *prate, &l, &a, 0);
	if (!pll->vco_table || alpha_pll_find_vco(pll, rate))
		return rate;

	min_freq = pll->vco_table[0].min_freq;
	max_freq = pll->vco_table[pll->num_vco - 1].max_freq;

	return clamp(rate, min_freq, max_freq);
}

const struct clk_ops clk_alpha_pll_rivian_evo_ops = {
	.enable = alpha_pll_lucid_5lpe_enable,
	.disable = alpha_pll_lucid_5lpe_disable,
	.is_enabled = clk_trion_pll_is_enabled,
	.recalc_rate = clk_rivian_evo_pll_recalc_rate,
	.round_rate = clk_rivian_evo_pll_round_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_rivian_evo_ops);

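/*
 * Stromer PLL configuration assembles the USER_CTL value from the individual
 * output, divider and VCO fields of the config, programs the lock detector
 * via USER_CTL_U and optionally places the PLL under FSM voting control.
 */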
void clk_stromer_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
			       const struct alpha_pll_config *config)
{
	u32 val, val_u, mask, mask_u;

	regmap_write(regmap, PLL_L_VAL(pll), config->l);
	regmap_write(regmap, PLL_ALPHA_VAL(pll), config->alpha);
	regmap_write(regmap, PLL_CONFIG_CTL(pll), config->config_ctl_val);

	if (pll_has_64bit_config(pll))
		regmap_write(regmap, PLL_CONFIG_CTL_U(pll),
			     config->config_ctl_hi_val);

	if (pll_alpha_width(pll) > 32)
		regmap_write(regmap, PLL_ALPHA_VAL_U(pll), config->alpha_hi);

	val = config->main_output_mask;
	val |= config->aux_output_mask;
	val |= config->aux2_output_mask;
	val |= config->early_output_mask;
	val |= config->pre_div_val;
	val |= config->post_div_val;
	val |= config->vco_val;
	val |= config->alpha_en_mask;
	val |= config->alpha_mode_mask;

	mask = config->main_output_mask;
	mask |= config->aux_output_mask;
	mask |= config->aux2_output_mask;
	mask |= config->early_output_mask;
	mask |= config->pre_div_mask;
	mask |= config->post_div_mask;
	mask |= config->vco_mask;
	mask |= config->alpha_en_mask;
	mask |= config->alpha_mode_mask;

	regmap_update_bits(regmap, PLL_USER_CTL(pll), mask, val);

	/* Stromer APSS PLL does not enable LOCK_DET by default, so enable it */
	val_u = config->status_val << ALPHA_PLL_STATUS_REG_SHIFT;
	val_u |= config->lock_det;

	mask_u = config->status_mask;
	mask_u |= config->lock_det;

	regmap_update_bits(regmap, PLL_USER_CTL_U(pll), mask_u, val_u);
	regmap_write(regmap, PLL_TEST_CTL(pll), config->test_ctl_val);
	regmap_write(regmap, PLL_TEST_CTL_U(pll), config->test_ctl_hi_val);

	if (pll->flags & SUPPORTS_FSM_MODE)
		qcom_pll_set_fsm_mode(regmap, PLL_MODE(pll), 6, 0);
}
EXPORT_SYMBOL_GPL(clk_stromer_pll_configure);

static int clk_alpha_pll_stromer_determine_rate(struct clk_hw *hw,
						struct clk_rate_request *req)
{
	u32 l;
	u64 a;

	req->rate = alpha_pll_round_rate(req->rate, req->best_parent_rate,
					 &l, &a, ALPHA_REG_BITWIDTH);

	return 0;
}

static int clk_alpha_pll_stromer_set_rate(struct clk_hw *hw, unsigned long rate,
					  unsigned long prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	int ret;
	u32 l;
	u64 a;

	rate = alpha_pll_round_rate(rate, prate, &l, &a, ALPHA_REG_BITWIDTH);

	regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);
	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL_U(pll),
		     a >> ALPHA_BITWIDTH);

	regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
			   PLL_ALPHA_EN, PLL_ALPHA_EN);

	if (!clk_hw_is_enabled(hw))
		return 0;

	/*
	 * Stromer PLL supports Dynamic programming.
	 * It allows the PLL frequency to be changed on-the-fly without first
	 * execution of a shutdown procedure followed by a bring up procedure.
	 */
	regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_UPDATE,
			   PLL_UPDATE);

	ret = wait_for_pll_update(pll);
	if (ret)
		return ret;

	return wait_for_pll_enable_lock(pll);
}

const struct clk_ops clk_alpha_pll_stromer_ops = {
	.enable = clk_alpha_pll_enable,
	.disable = clk_alpha_pll_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.recalc_rate = clk_alpha_pll_recalc_rate,
	.determine_rate = clk_alpha_pll_stromer_determine_rate,
	.set_rate = clk_alpha_pll_stromer_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_stromer_ops);

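/*
 * Stromer Plus has no dynamic-update support: a rate change disables the PLL
 * output, reprograms L/ALPHA and walks the full bypass/reset bring-up
 * sequence before restoring the output enable.
 */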
static int clk_alpha_pll_stromer_plus_set_rate(struct clk_hw *hw,
					       unsigned long rate,
					       unsigned long prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l, alpha_width = pll_alpha_width(pll);
	int ret, pll_mode;
	u64 a;

	rate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);

	ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &pll_mode);
	if (ret)
		return ret;

	regmap_write(pll->clkr.regmap, PLL_MODE(pll), 0);

	/* Delay of 2 output clock ticks required until output is disabled */
	udelay(1);

	regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);

	if (alpha_width > ALPHA_BITWIDTH)
		a <<= alpha_width - ALPHA_BITWIDTH;

	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);
	regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL_U(pll),
		     a >> ALPHA_BITWIDTH);

	regmap_write(pll->clkr.regmap, PLL_MODE(pll), PLL_BYPASSNL);

	/* Wait five microseconds or more */
	udelay(5);
	regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_RESET_N,
			   PLL_RESET_N);

	/* The lock time should be less than 50 microseconds worst case */
	usleep_range(50, 60);

	ret = wait_for_pll_enable_lock(pll);
	if (ret) {
		pr_err("Wait for PLL enable lock failed [%s] %d\n",
		       clk_hw_get_name(hw), ret);
		return ret;
	}

	if (pll_mode & PLL_OUTCTRL)
		regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_OUTCTRL,
				   PLL_OUTCTRL);

	return 0;
}

const struct clk_ops clk_alpha_pll_stromer_plus_ops = {
	.prepare = clk_alpha_pll_enable,
	.unprepare = clk_alpha_pll_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.recalc_rate = clk_alpha_pll_recalc_rate,
	.determine_rate = clk_alpha_pll_stromer_determine_rate,
	.set_rate = clk_alpha_pll_stromer_plus_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_stromer_plus_ops);