// SPDX-License-Identifier: GPL-2.0-only
/*
 * drivers/media/i2c/ccs-pll.c
 *
 * Generic MIPI CCS/SMIA/SMIA++ PLL calculator
 *
 * Copyright (C) 2020 Intel Corporation
 * Copyright (C) 2011--2012 Nokia Corporation
 * Contact: Sakari Ailus <sakari.ailus@linux.intel.com>
 */

#include <linux/device.h>
#include <linux/gcd.h>
#include <linux/lcm.h>
#include <linux/module.h>

#include "ccs-pll.h"

/* Return an even number or one. */
static inline uint32_t clk_div_even(uint32_t a)
{
	return max_t(uint32_t, 1, a & ~1);
}

/* Return an even number or one, rounding up. */
static inline uint32_t clk_div_even_up(uint32_t a)
{
	if (a == 1)
		return 1;
	return (a + 1) & ~1;
}
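
/*
 * Illustrative examples (not tied to any particular sensor): clk_div_even(5)
 * returns 4 and clk_div_even(1) returns 1, while clk_div_even_up(5) returns 6
 * and clk_div_even_up(4) returns 4.
 */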

static inline uint32_t is_one_or_even(uint32_t a)
{
	if (a == 1)
		return 1;
	if (a & 1)
		return 0;

	return 1;
}

static inline uint32_t one_or_more(uint32_t a)
{
	return a ?: 1;
}

static int bounds_check(struct device *dev, uint32_t val,
			uint32_t min, uint32_t max, const char *prefix,
			char *str)
{
	if (val >= min && val <= max)
		return 0;

	dev_dbg(dev, "%s_%s out of bounds: %d (%d--%d)\n", prefix,
		str, val, min, max);

	return -EINVAL;
}

#define PLL_OP 1
#define PLL_VT 2

static const char *pll_string(unsigned int which)
{
	switch (which) {
	case PLL_OP:
		return "op";
	case PLL_VT:
		return "vt";
	}

	return NULL;
}

#define PLL_FL(f) CCS_PLL_FLAG_##f

static void print_pll(struct device *dev, struct ccs_pll *pll)
{
	const struct {
		struct ccs_pll_branch_fr *fr;
		struct ccs_pll_branch_bk *bk;
		unsigned int which;
	} branches[] = {
		{ &pll->vt_fr, &pll->vt_bk, PLL_VT },
		{ &pll->op_fr, &pll->op_bk, PLL_OP }
	}, *br;
	unsigned int i;

	dev_dbg(dev, "ext_clk_freq_hz\t\t%u\n", pll->ext_clk_freq_hz);

	for (i = 0, br = branches; i < ARRAY_SIZE(branches); i++, br++) {
		const char *s = pll_string(br->which);

		if (br->which == PLL_VT) {
			dev_dbg(dev, "%s_pre_pll_clk_div\t\t%u\n", s,
				br->fr->pre_pll_clk_div);
			dev_dbg(dev, "%s_pll_multiplier\t\t%u\n", s,
				br->fr->pll_multiplier);
			dev_dbg(dev, "%s_pll_ip_clk_freq_hz\t%u\n", s,
				br->fr->pll_ip_clk_freq_hz);
			dev_dbg(dev, "%s_pll_op_clk_freq_hz\t%u\n", s,
				br->fr->pll_op_clk_freq_hz);
		}

		if (!(pll->flags & CCS_PLL_FLAG_NO_OP_CLOCKS) ||
		    br->which == PLL_VT) {
			dev_dbg(dev, "%s_sys_clk_div\t\t%u\n", s,
				br->bk->sys_clk_div);
			dev_dbg(dev, "%s_pix_clk_div\t\t%u\n", s,
				br->bk->pix_clk_div);
			dev_dbg(dev, "%s_sys_clk_freq_hz\t%u\n", s,
				br->bk->sys_clk_freq_hz);
			dev_dbg(dev, "%s_pix_clk_freq_hz\t%u\n", s,
				br->bk->pix_clk_freq_hz);
		}
	}

	dev_dbg(dev, "flags%s%s%s%s%s%s\n",
		pll->flags & PLL_FL(LANE_SPEED_MODEL) ? " lane-speed" : "",
		pll->flags & PLL_FL(LINK_DECOUPLED) ? " link-decoupled" : "",
		pll->flags & PLL_FL(EXT_IP_PLL_DIVIDER) ?
		" ext-ip-pll-divider" : "",
		pll->flags & PLL_FL(FLEXIBLE_OP_PIX_CLK_DIV) ?
		" flexible-op-pix-div" : "",
		pll->flags & PLL_FL(FIFO_DERATING) ? " fifo-derating" : "",
		pll->flags & PLL_FL(FIFO_OVERRATING) ? " fifo-overrating" : "");
}

static int check_fr_bounds(struct device *dev,
			   const struct ccs_pll_limits *lim,
			   struct ccs_pll *pll, unsigned int which)
{
	const struct ccs_pll_branch_limits_fr *lim_fr;
	struct ccs_pll_branch_fr *pll_fr;
	const char *s = pll_string(which);
	int rval;

	if (which == PLL_OP) {
		lim_fr = &lim->op_fr;
		pll_fr = &pll->op_fr;
	} else {
		lim_fr = &lim->vt_fr;
		pll_fr = &pll->vt_fr;
	}

	rval = bounds_check(dev, pll_fr->pre_pll_clk_div,
			    lim_fr->min_pre_pll_clk_div,
			    lim_fr->max_pre_pll_clk_div, s, "pre_pll_clk_div");
	if (!rval)
		rval = bounds_check(dev, pll_fr->pll_ip_clk_freq_hz,
				    lim_fr->min_pll_ip_clk_freq_hz,
				    lim_fr->max_pll_ip_clk_freq_hz,
				    s, "pll_ip_clk_freq_hz");
	if (!rval)
		rval = bounds_check(dev, pll_fr->pll_multiplier,
				    lim_fr->min_pll_multiplier,
				    lim_fr->max_pll_multiplier,
				    s, "pll_multiplier");
	if (!rval)
		rval = bounds_check(dev, pll_fr->pll_op_clk_freq_hz,
				    lim_fr->min_pll_op_clk_freq_hz,
				    lim_fr->max_pll_op_clk_freq_hz,
				    s, "pll_op_clk_freq_hz");

	return rval;
}

static int check_bk_bounds(struct device *dev,
			   const struct ccs_pll_limits *lim,
			   struct ccs_pll *pll, unsigned int which)
{
	const struct ccs_pll_branch_limits_bk *lim_bk;
	struct ccs_pll_branch_bk *pll_bk;
	const char *s = pll_string(which);
	int rval;

	if (which == PLL_OP) {
		if (pll->flags & CCS_PLL_FLAG_NO_OP_CLOCKS)
			return 0;

		lim_bk = &lim->op_bk;
		pll_bk = &pll->op_bk;
	} else {
		lim_bk = &lim->vt_bk;
		pll_bk = &pll->vt_bk;
	}

	rval = bounds_check(dev, pll_bk->sys_clk_div,
			    lim_bk->min_sys_clk_div,
			    lim_bk->max_sys_clk_div, s, "sys_clk_div");
	if (!rval)
		rval = bounds_check(dev, pll_bk->sys_clk_freq_hz,
				    lim_bk->min_sys_clk_freq_hz,
				    lim_bk->max_sys_clk_freq_hz,
				    s, "sys_clk_freq_hz");
	if (!rval)
		rval = bounds_check(dev, pll_bk->pix_clk_div,
				    lim_bk->min_pix_clk_div,
				    lim_bk->max_pix_clk_div,
				    s, "pix_clk_div");
	if (!rval)
		rval = bounds_check(dev, pll_bk->pix_clk_freq_hz,
				    lim_bk->min_pix_clk_freq_hz,
				    lim_bk->max_pix_clk_freq_hz,
				    s, "pix_clk_freq_hz");

	return rval;
}

static int check_ext_bounds(struct device *dev, struct ccs_pll *pll)
{
	if (!(pll->flags & CCS_PLL_FLAG_FIFO_DERATING) &&
	    pll->pixel_rate_pixel_array > pll->pixel_rate_csi) {
		dev_dbg(dev, "device does not support derating\n");
		return -EINVAL;
	}

	if (!(pll->flags & CCS_PLL_FLAG_FIFO_OVERRATING) &&
	    pll->pixel_rate_pixel_array < pll->pixel_rate_csi) {
		dev_dbg(dev, "device does not support overrating\n");
		return -EINVAL;
	}

	return 0;
}

#define CPHY_CONST 7
#define DPHY_CONST 16
#define PHY_CONST_DIV 16
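
/*
 * phy_const / PHY_CONST_DIV scales bit counts to what the PHY actually
 * carries: the ratio is 16/16 = 1 for D-PHY and 7/16 for C-PHY, as C-PHY
 * transports 16 bits as 7 symbols per lane.
 */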

static void
ccs_pll_calculate_vt(struct device *dev, const struct ccs_pll_limits *lim,
		     const struct ccs_pll_branch_limits_bk *op_lim_bk,
		     struct ccs_pll *pll, struct ccs_pll_branch_fr *pll_fr,
		     struct ccs_pll_branch_bk *op_pll_bk, bool cphy,
		     uint32_t phy_const)
{
	uint32_t sys_div;
	uint32_t best_pix_div = INT_MAX >> 1;
	uint32_t vt_op_binning_div;
	uint32_t min_vt_div, max_vt_div, vt_div;
	uint32_t min_sys_div, max_sys_div;

	if (pll->flags & CCS_PLL_FLAG_NO_OP_CLOCKS)
		goto out_calc_pixel_rate;

	/*
	 * Find out whether a sensor supports derating. If it does not, the VT
	 * and OP domains are required to run at the same pixel rate.
	 */
	if (!(pll->flags & CCS_PLL_FLAG_FIFO_DERATING)) {
		min_vt_div =
			op_pll_bk->sys_clk_div * op_pll_bk->pix_clk_div
			* pll->vt_lanes * phy_const
			/ pll->op_lanes / PHY_CONST_DIV;
	} else {
		/*
		 * Some sensors perform analogue binning and some do this
		 * digitally. The ones doing this digitally can roughly be
		 * identified using this formula. The ones doing this
		 * digitally should run at a higher clock rate, so a smaller
		 * divisor is used on the video timing side.
		 */
		if (lim->min_line_length_pck_bin > lim->min_line_length_pck
		    / pll->binning_horizontal)
			vt_op_binning_div = pll->binning_horizontal;
		else
			vt_op_binning_div = 1;
		dev_dbg(dev, "vt_op_binning_div: %u\n", vt_op_binning_div);
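
		/*
		 * Hypothetical example: with min_line_length_pck 3800,
		 * binning_horizontal 2 and min_line_length_pck_bin 2400,
		 * 2400 > 3800 / 2 = 1900, so binning is assumed to be digital
		 * and vt_op_binning_div becomes 2.
		 */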

		/*
		 * Profile 2 supports vt_pix_clk_div E [4, 10]
		 *
		 * Horizontal binning can be used as a base for difference in
		 * divisors. One must make sure that horizontal blanking is
		 * enough to accommodate the CSI-2 sync codes.
		 *
		 * Take scaling factor and number of VT lanes into account as well.
		 *
		 * Find absolute limits for the factor of vt divider.
		 */
		dev_dbg(dev, "scale_m: %u\n", pll->scale_m);
		min_vt_div =
			DIV_ROUND_UP(pll->bits_per_pixel
				     * op_pll_bk->sys_clk_div * pll->scale_n
				     * pll->vt_lanes * phy_const,
				     (pll->flags &
				      CCS_PLL_FLAG_LANE_SPEED_MODEL ?
				      pll->csi2.lanes : 1)
				     * vt_op_binning_div * pll->scale_m
				     * PHY_CONST_DIV);
	}

	/* Find smallest and biggest allowed vt divisor. */
	dev_dbg(dev, "min_vt_div: %u\n", min_vt_div);
	min_vt_div = max(min_vt_div,
			 DIV_ROUND_UP(pll_fr->pll_op_clk_freq_hz,
				      lim->vt_bk.max_pix_clk_freq_hz));
	dev_dbg(dev, "min_vt_div: max_vt_pix_clk_freq_hz: %u\n",
		min_vt_div);
	min_vt_div = max_t(uint32_t, min_vt_div,
			   lim->vt_bk.min_pix_clk_div
			   * lim->vt_bk.min_sys_clk_div);
	dev_dbg(dev, "min_vt_div: min_vt_clk_div: %u\n", min_vt_div);

	max_vt_div = lim->vt_bk.max_sys_clk_div * lim->vt_bk.max_pix_clk_div;
	dev_dbg(dev, "max_vt_div: %u\n", max_vt_div);
	max_vt_div = min(max_vt_div,
			 DIV_ROUND_UP(pll_fr->pll_op_clk_freq_hz,
				      lim->vt_bk.min_pix_clk_freq_hz));
	dev_dbg(dev, "max_vt_div: min_vt_pix_clk_freq_hz: %u\n",
		max_vt_div);

	/*
	 * Find limits for sys_clk_div. Not all values are possible with all
	 * values of pix_clk_div.
	 */
	min_sys_div = lim->vt_bk.min_sys_clk_div;
	dev_dbg(dev, "min_sys_div: %u\n", min_sys_div);
	min_sys_div = max(min_sys_div,
			  DIV_ROUND_UP(min_vt_div,
				       lim->vt_bk.max_pix_clk_div));
	dev_dbg(dev, "min_sys_div: max_vt_pix_clk_div: %u\n", min_sys_div);
	min_sys_div = max(min_sys_div,
			  pll_fr->pll_op_clk_freq_hz
			  / lim->vt_bk.max_sys_clk_freq_hz);
	dev_dbg(dev, "min_sys_div: max_pll_op_clk_freq_hz: %u\n", min_sys_div);
	min_sys_div = clk_div_even_up(min_sys_div);
	dev_dbg(dev, "min_sys_div: one or even: %u\n", min_sys_div);

	max_sys_div = lim->vt_bk.max_sys_clk_div;
	dev_dbg(dev, "max_sys_div: %u\n", max_sys_div);
	max_sys_div = min(max_sys_div,
			  DIV_ROUND_UP(max_vt_div,
				       lim->vt_bk.min_pix_clk_div));
	dev_dbg(dev, "max_sys_div: min_vt_pix_clk_div: %u\n", max_sys_div);
	max_sys_div = min(max_sys_div,
			  DIV_ROUND_UP(pll_fr->pll_op_clk_freq_hz,
				       lim->vt_bk.min_pix_clk_freq_hz));
	dev_dbg(dev, "max_sys_div: min_vt_pix_clk_freq_hz: %u\n", max_sys_div);

	/*
	 * Find pix_div such that a legal pix_div * sys_div results in a
	 * value which is not smaller than div, the desired divisor.
	 */
	for (vt_div = min_vt_div; vt_div <= max_vt_div;
	     vt_div += 2 - (vt_div & 1)) {
		for (sys_div = min_sys_div;
		     sys_div <= max_sys_div;
		     sys_div += 2 - (sys_div & 1)) {
			uint16_t pix_div = DIV_ROUND_UP(vt_div, sys_div);
			uint16_t rounded_div;

			if (pix_div < lim->vt_bk.min_pix_clk_div
			    || pix_div > lim->vt_bk.max_pix_clk_div) {
				dev_dbg(dev,
					"pix_div %u too small or too big (%u--%u)\n",
					pix_div,
					lim->vt_bk.min_pix_clk_div,
					lim->vt_bk.max_pix_clk_div);
				continue;
			}

			rounded_div = roundup(vt_div, best_pix_div);

			/* Check if this one is better. */
			if (pix_div * sys_div <= rounded_div)
				best_pix_div = pix_div;

			/* Bail out if we've already found the best value. */
			if (vt_div == rounded_div)
				break;
		}
		if (best_pix_div < INT_MAX >> 1)
			break;
	}

	pll->vt_bk.sys_clk_div = DIV_ROUND_UP(vt_div, best_pix_div);
	pll->vt_bk.pix_clk_div = best_pix_div;

	pll->vt_bk.sys_clk_freq_hz =
		pll_fr->pll_op_clk_freq_hz / pll->vt_bk.sys_clk_div;
	pll->vt_bk.pix_clk_freq_hz =
		pll->vt_bk.sys_clk_freq_hz / pll->vt_bk.pix_clk_div;

out_calc_pixel_rate:
	pll->pixel_rate_pixel_array =
		pll->vt_bk.pix_clk_freq_hz * pll->vt_lanes;
}

/*
 * Heuristically guess the PLL tree for a given common multiplier and
 * divisor. Begin with the operational timing and continue to video
 * timing once operational timing has been verified.
 *
 * @mul is the PLL multiplier and @div is the common divisor
 * (pre_pll_clk_div and op_sys_clk_div combined). The final PLL
 * multiplier will be a multiple of @mul.
 *
 * @return Zero on success, error code on error.
 */
static int
ccs_pll_calculate_op(struct device *dev, const struct ccs_pll_limits *lim,
		     const struct ccs_pll_branch_limits_fr *op_lim_fr,
		     const struct ccs_pll_branch_limits_bk *op_lim_bk,
		     struct ccs_pll *pll, struct ccs_pll_branch_fr *op_pll_fr,
		     struct ccs_pll_branch_bk *op_pll_bk, uint32_t mul,
		     uint32_t div, uint32_t l, bool cphy, uint32_t phy_const)
{
	/*
	 * Higher multipliers (and divisors) are often required than
	 * necessitated by the external clock and the output clocks.
	 * There are limits for all values in the clock tree. These
	 * are the minimum and maximum multiplier for mul.
	 */
	uint32_t more_mul_min, more_mul_max;
	uint32_t more_mul_factor;
	uint32_t i;

	/*
	 * Get pre_pll_clk_div so that our pll_op_clk_freq_hz won't be
	 * too high.
	 */
	dev_dbg(dev, "op_pre_pll_clk_div %u\n", op_pll_fr->pre_pll_clk_div);

	/* Don't go above max pll multiplier. */
	more_mul_max = op_lim_fr->max_pll_multiplier / mul;
	dev_dbg(dev, "more_mul_max: max_op_pll_multiplier check: %u\n",
		more_mul_max);
	/* Don't go above max pll op frequency. */
	more_mul_max =
		min_t(uint32_t,
		      more_mul_max,
		      op_lim_fr->max_pll_op_clk_freq_hz
		      / (pll->ext_clk_freq_hz /
			 op_pll_fr->pre_pll_clk_div * mul));
	dev_dbg(dev, "more_mul_max: max_pll_op_clk_freq_hz check: %u\n",
		more_mul_max);
	/* Don't go above the division capability of op sys clock divider. */
	more_mul_max = min(more_mul_max,
			   op_lim_bk->max_sys_clk_div * op_pll_fr->pre_pll_clk_div
			   / div);
	dev_dbg(dev, "more_mul_max: max_op_sys_clk_div check: %u\n",
		more_mul_max);
	/* Ensure we won't go above max_pll_multiplier. */
	more_mul_max = min(more_mul_max, op_lim_fr->max_pll_multiplier / mul);
	dev_dbg(dev, "more_mul_max: min_pll_multiplier check: %u\n",
		more_mul_max);

	/* Ensure we won't go below min_pll_op_clk_freq_hz. */
	more_mul_min = DIV_ROUND_UP(op_lim_fr->min_pll_op_clk_freq_hz,
				    pll->ext_clk_freq_hz /
				    op_pll_fr->pre_pll_clk_div * mul);
	dev_dbg(dev, "more_mul_min: min_op_pll_op_clk_freq_hz check: %u\n",
		more_mul_min);
	/* Ensure we won't go below min_pll_multiplier. */
	more_mul_min = max(more_mul_min,
			   DIV_ROUND_UP(op_lim_fr->min_pll_multiplier, mul));
	dev_dbg(dev, "more_mul_min: min_op_pll_multiplier check: %u\n",
		more_mul_min);

	if (more_mul_min > more_mul_max) {
		dev_dbg(dev,
			"unable to compute more_mul_min and more_mul_max\n");
		return -EINVAL;
	}

	more_mul_factor = lcm(div, op_pll_fr->pre_pll_clk_div) / div;
	dev_dbg(dev, "more_mul_factor: %u\n", more_mul_factor);
	more_mul_factor = lcm(more_mul_factor, op_lim_bk->min_sys_clk_div);
	dev_dbg(dev, "more_mul_factor: min_op_sys_clk_div: %d\n",
		more_mul_factor);
	i = roundup(more_mul_min, more_mul_factor);
	if (!is_one_or_even(i))
		i <<= 1;

	dev_dbg(dev, "final more_mul: %u\n", i);
	if (i > more_mul_max) {
		dev_dbg(dev, "final more_mul is bad, max %u\n", more_mul_max);
		return -EINVAL;
	}

	op_pll_fr->pll_multiplier = mul * i;
	op_pll_bk->sys_clk_div = div * i / op_pll_fr->pre_pll_clk_div;
	dev_dbg(dev, "op_sys_clk_div: %u\n", op_pll_bk->sys_clk_div);

	op_pll_fr->pll_ip_clk_freq_hz = pll->ext_clk_freq_hz
		/ op_pll_fr->pre_pll_clk_div;

	op_pll_fr->pll_op_clk_freq_hz = op_pll_fr->pll_ip_clk_freq_hz
		* op_pll_fr->pll_multiplier;
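
	/*
	 * Purely hypothetical illustration: with ext_clk_freq_hz 25 MHz,
	 * pre_pll_clk_div 2, mul 128, div 1 and a final more_mul (i) of 2,
	 * this yields pll_multiplier 256, op_sys_clk_div 1,
	 * pll_ip_clk_freq_hz 12.5 MHz and pll_op_clk_freq_hz 3.2 GHz.
	 */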

	if (pll->flags & CCS_PLL_FLAG_LANE_SPEED_MODEL)
		op_pll_bk->pix_clk_div = pll->bits_per_pixel
			* pll->op_lanes * phy_const
			/ PHY_CONST_DIV / pll->csi2.lanes / l;
	else
		op_pll_bk->pix_clk_div =
			pll->bits_per_pixel * phy_const / PHY_CONST_DIV / l;
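
	/*
	 * E.g. (hypothetical values) 10 bits per pixel on D-PHY with l == 1
	 * gives an op_pix_clk_div of 10 * 16 / 16 / 1 = 10; the same holds in
	 * the lane speed model whenever op_lanes equals csi2.lanes.
	 */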

	op_pll_bk->pix_clk_freq_hz =
		op_pll_bk->sys_clk_freq_hz / op_pll_bk->pix_clk_div;
	dev_dbg(dev, "op_pix_clk_div: %u\n", op_pll_bk->pix_clk_div);

	return 0;
}

int ccs_pll_calculate(struct device *dev, const struct ccs_pll_limits *lim,
		      struct ccs_pll *pll)
{
	const struct ccs_pll_branch_limits_fr *op_lim_fr = &lim->vt_fr;
	const struct ccs_pll_branch_limits_bk *op_lim_bk = &lim->op_bk;
	struct ccs_pll_branch_fr *op_pll_fr = &pll->vt_fr;
	struct ccs_pll_branch_bk *op_pll_bk = &pll->op_bk;
	bool cphy = pll->bus_type == CCS_PLL_BUS_TYPE_CSI2_CPHY;
	uint32_t phy_const = cphy ? CPHY_CONST : DPHY_CONST;
	uint16_t min_op_pre_pll_clk_div;
	uint16_t max_op_pre_pll_clk_div;
	uint32_t mul, div;
	uint32_t l = (!pll->op_bits_per_lane ||
		      pll->op_bits_per_lane >= pll->bits_per_pixel) ? 1 : 2;
	uint32_t i;
	int rval = -EINVAL;

	if (!(pll->flags & CCS_PLL_FLAG_LANE_SPEED_MODEL)) {
		pll->op_lanes = 1;
		pll->vt_lanes = 1;
	}

	if (!pll->op_lanes || !pll->vt_lanes || !pll->bits_per_pixel ||
	    !pll->ext_clk_freq_hz || !pll->link_freq || !pll->scale_m ||
	    !op_lim_fr->min_pll_ip_clk_freq_hz ||
	    !op_lim_fr->max_pll_ip_clk_freq_hz ||
	    !op_lim_fr->min_pll_op_clk_freq_hz ||
	    !op_lim_fr->max_pll_op_clk_freq_hz ||
	    !op_lim_bk->max_sys_clk_div || !op_lim_fr->max_pll_multiplier)
		return -EINVAL;

	/*
	 * Make sure op_pix_clk_div will be an integer --- unless flexible
	 * op_pix_clk_div is supported.
	 */
	if (!(pll->flags & CCS_PLL_FLAG_FLEXIBLE_OP_PIX_CLK_DIV) &&
	    (pll->bits_per_pixel * pll->op_lanes) % (pll->csi2.lanes * l)) {
		dev_dbg(dev, "op_pix_clk_div not an integer (bpp %u, op lanes %u, lanes %u, l %u)\n",
			pll->bits_per_pixel, pll->op_lanes, pll->csi2.lanes, l);
		return -EINVAL;
	}
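
	/*
	 * For instance (hypothetical numbers), 10 bits per pixel on one OP
	 * lane over a 4-lane bus with l == 1 gives 10 % 4 == 2, so the
	 * configuration is rejected unless the flexible divider is available.
	 */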

	dev_dbg(dev, "vt_lanes: %u\n", pll->vt_lanes);
	dev_dbg(dev, "op_lanes: %u\n", pll->op_lanes);

	if (pll->flags & CCS_PLL_FLAG_NO_OP_CLOCKS) {
		/*
		 * If there's no OP PLL at all, use the VT values
		 * instead. The OP values are ignored for the rest of
		 * the PLL calculation.
		 */
		op_lim_fr = &lim->vt_fr;
		op_lim_bk = &lim->vt_bk;
		op_pll_bk = &pll->vt_bk;
	}

	dev_dbg(dev, "binning: %ux%u\n", pll->binning_horizontal,
		pll->binning_vertical);

	switch (pll->bus_type) {
	case CCS_PLL_BUS_TYPE_CSI2_DPHY:
		/* CSI transfers 2 bits per clock per lane; thus times 2 */
		op_pll_bk->sys_clk_freq_hz = pll->link_freq * 2
			* (pll->flags & CCS_PLL_FLAG_LANE_SPEED_MODEL ?
			   1 : pll->csi2.lanes);
		break;
	case CCS_PLL_BUS_TYPE_CSI2_CPHY:
		op_pll_bk->sys_clk_freq_hz =
			pll->link_freq
			* (pll->flags & CCS_PLL_FLAG_LANE_SPEED_MODEL ?
			   1 : pll->csi2.lanes);
		break;
	default:
		return -EINVAL;
	}

	pll->pixel_rate_csi =
		div_u64((uint64_t)op_pll_bk->sys_clk_freq_hz
			* (pll->flags & CCS_PLL_FLAG_LANE_SPEED_MODEL ?
			   pll->csi2.lanes : 1) * PHY_CONST_DIV,
			phy_const * pll->bits_per_pixel * l);
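
	/*
	 * Hypothetical example: a D-PHY link at link_freq 400 MHz with four
	 * lanes, 10 bits per pixel, l == 1 and no lane speed model gives
	 * sys_clk_freq_hz = 400 MHz * 2 * 4 = 3.2 GHz and pixel_rate_csi =
	 * 3.2 GHz * 16 / (16 * 10) = 320 Mp/s.
	 */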

	/* Figure out limits for OP pre-pll divider based on extclk */
	dev_dbg(dev, "min / max op_pre_pll_clk_div: %u / %u\n",
		op_lim_fr->min_pre_pll_clk_div, op_lim_fr->max_pre_pll_clk_div);
	max_op_pre_pll_clk_div =
		min_t(uint16_t, op_lim_fr->max_pre_pll_clk_div,
		      clk_div_even(pll->ext_clk_freq_hz /
				   op_lim_fr->min_pll_ip_clk_freq_hz));
	min_op_pre_pll_clk_div =
		max_t(uint16_t, op_lim_fr->min_pre_pll_clk_div,
		      clk_div_even_up(
			      DIV_ROUND_UP(pll->ext_clk_freq_hz,
					   op_lim_fr->max_pll_ip_clk_freq_hz)));
	dev_dbg(dev, "pre-pll check: min / max op_pre_pll_clk_div: %u / %u\n",
		min_op_pre_pll_clk_div, max_op_pre_pll_clk_div);
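
	/*
	 * As an illustration with made-up limits: ext_clk_freq_hz 25 MHz and
	 * pll_ip_clk_freq_hz allowed between 2 and 24 MHz give a divider
	 * range of clk_div_even_up(DIV_ROUND_UP(25, 24)) = 2 up to
	 * clk_div_even(25 / 2) = 12, further clamped by the device limits.
	 */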

	i = gcd(op_pll_bk->sys_clk_freq_hz, pll->ext_clk_freq_hz);
	mul = op_pll_bk->sys_clk_freq_hz / i;
	div = pll->ext_clk_freq_hz / i;
	dev_dbg(dev, "mul %u / div %u\n", mul, div);
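
	/*
	 * E.g. (hypothetical) sys_clk_freq_hz 3.2 GHz and ext_clk_freq_hz
	 * 25 MHz share a GCD of 25 MHz, giving mul = 128 and div = 1.
	 */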

	min_op_pre_pll_clk_div =
		max_t(uint16_t, min_op_pre_pll_clk_div,
		      clk_div_even_up(
			      mul /
			      one_or_more(
				      DIV_ROUND_UP(op_lim_fr->max_pll_op_clk_freq_hz,
						   pll->ext_clk_freq_hz))));
	dev_dbg(dev, "pll_op check: min / max op_pre_pll_clk_div: %u / %u\n",
		min_op_pre_pll_clk_div, max_op_pre_pll_clk_div);

	for (op_pll_fr->pre_pll_clk_div = min_op_pre_pll_clk_div;
	     op_pll_fr->pre_pll_clk_div <= max_op_pre_pll_clk_div;
	     op_pll_fr->pre_pll_clk_div +=
		     (pll->flags & CCS_PLL_FLAG_EXT_IP_PLL_DIVIDER) ? 1 :
		     2 - (op_pll_fr->pre_pll_clk_div & 1)) {
		rval = ccs_pll_calculate_op(dev, lim, op_lim_fr, op_lim_bk, pll,
					    op_pll_fr, op_pll_bk, mul, div, l,
					    cphy, phy_const);
		if (rval)
			continue;

		rval = check_fr_bounds(dev, lim, pll, PLL_VT);
		if (rval)
			continue;

		rval = check_bk_bounds(dev, lim, pll, PLL_OP);
		if (rval)
			continue;

		ccs_pll_calculate_vt(dev, lim, op_lim_bk, pll, op_pll_fr,
				     op_pll_bk, cphy, phy_const);

		rval = check_bk_bounds(dev, lim, pll, PLL_VT);
		if (rval)
			continue;

		rval = check_ext_bounds(dev, pll);
		if (rval)
			continue;

		print_pll(dev, pll);

		return 0;
	}

	dev_dbg(dev, "unable to compute pre_pll divisor\n");

	return rval;
}
EXPORT_SYMBOL_GPL(ccs_pll_calculate);

MODULE_AUTHOR("Sakari Ailus <sakari.ailus@linux.intel.com>");
MODULE_DESCRIPTION("Generic MIPI CCS/SMIA/SMIA++ PLL calculator");
MODULE_LICENSE("GPL v2");