// SPDX-License-Identifier: GPL-2.0
#include <linux/bitops.h>
#include <linux/math.h>
#include <linux/string.h>
#include <asm/unaligned.h>

#ifdef CONFIG_VALGRIND
#include <valgrind/memcheck.h>
#endif

#include "varint.h"

/**
 * bch2_varint_encode - encode a variable length integer
 * @out:	destination to encode to
 * @v:		unsigned integer to encode
 *
 * Returns:	size in bytes of the encoded integer - at most 9 bytes
 */
int bch2_varint_encode(u8 *out, u64 v)
{
	unsigned bits = fls64(v|1);
	unsigned bytes = DIV_ROUND_UP(bits, 7);
	__le64 v_le;

	if (likely(bytes < 9)) {
		v <<= bytes;
		/* Length marker in the low bits: bytes - 1 ones, then a zero */
		v |= ~(~0 << (bytes - 1));
		v_le = cpu_to_le64(v);
		memcpy(out, &v_le, bytes);
	} else {
		*out++ = 255;
		bytes = 9;
		put_unaligned_le64(v, out);
	}

	return bytes;
}
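
/*
 * Example usage (a sketch, not from the original file):
 *
 *	u8 buf[9];	// worst-case encoded size
 *	int len = bch2_varint_encode(buf, 300);	// returns 2: 300 needs 9 bits,
 *						// each byte carries 7 value bits
 */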

/**
 * bch2_varint_decode - decode a variable length integer
 * @in:		varint to decode
 * @end:	end of buffer to decode from
 * @out:	on success, decoded integer
 *
 * Returns:	size in bytes of the decoded integer, or -1 on failure (would
 * have read past the end of the buffer)
 */
int bch2_varint_decode(const u8 *in, const u8 *end, u64 *out)
{
	unsigned bytes = likely(in < end)
		? ffz(*in & 255) + 1	/* trailing ones in first byte, plus one */
		: 1;
	u64 v;

	if (unlikely(in + bytes > end))
		return -1;

	if (likely(bytes < 9)) {
		__le64 v_le = 0;

		memcpy(&v_le, in, bytes);
		v = le64_to_cpu(v_le);
		v >>= bytes;
	} else {
		v = get_unaligned_le64(++in);
	}

	*out = v;
	return bytes;
}
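
/*
 * Example round trip (a sketch, not from the original file):
 *
 *	u8 buf[9];
 *	u64 v;
 *	int len = bch2_varint_encode(buf, 300);
 *	int ret = bch2_varint_decode(buf, buf + len, &v);
 *	// ret == len, v == 300; ret == -1 would mean a truncated varint
 */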

/**
 * bch2_varint_encode_fast - fast version of bch2_varint_encode
 * @out:	destination to encode to
 * @v:		unsigned integer to encode
 *
 * Returns:	size in bytes of the encoded integer - at most 9 bytes
 *
 * This version assumes it's always safe to write 8 bytes to @out, even if the
 * encoded integer would be smaller.
 */
int bch2_varint_encode_fast(u8 *out, u64 v)
{
	unsigned bits = fls64(v|1);
	unsigned bytes = DIV_ROUND_UP(bits, 7);

	if (likely(bytes < 9)) {
		v <<= bytes;
		v |= ~(~0 << (bytes - 1));
	} else {
		*out++ = 255;
		bytes = 9;
	}

	put_unaligned_le64(v, out);
	return bytes;
}
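
/*
 * Design note (an observation on the code above, not from the original
 * comments): unlike bch2_varint_encode(), this version always issues one
 * unconditional 8-byte store instead of a variable-length memcpy(), so the
 * caller guarantees writable slack past the encoded bytes:
 *
 *	u8 buf[16];	// worst case is 9 bytes; slack covers the 8-byte store
 *	u64 v = 1234;
 *	int len = bch2_varint_encode_fast(buf, v);	// only len bytes are meaningful
 */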

/**
 * bch2_varint_decode_fast - fast version of bch2_varint_decode
 * @in:		varint to decode
 * @end:	end of buffer to decode from
 * @out:	on success, decoded integer
 *
 * Returns:	size in bytes of the decoded integer, or -1 on failure (would
 * have read past the end of the buffer)
 *
 * This version assumes that it is safe to read at most 8 bytes past the end of
 * @end (we still return an error if the varint extends past @end).
 */
int bch2_varint_decode_fast(const u8 *in, const u8 *end, u64 *out)
{
#ifdef CONFIG_VALGRIND
	VALGRIND_MAKE_MEM_DEFINED(in, 8);
#endif
	u64 v = get_unaligned_le64(in);
	unsigned bytes = ffz(*in) + 1;

	if (unlikely(in + bytes > end))
		return -1;

	if (likely(bytes < 9)) {
		v >>= bytes;
		/* Mask off the bytes speculatively read past the varint */
		v &= ~(~0ULL << (7 * bytes));
	} else {
		v = get_unaligned_le64(++in);
	}

	*out = v;
	return bytes;
}
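
/*
 * Example (a sketch, not from the original file; BUF_LEN is a hypothetical
 * name): since the fast decoder may read up to 8 bytes past @end, callers
 * decode from buffers with trailing padding:
 *
 *	u8 buf[BUF_LEN + 8];	// BUF_LEN encoded bytes plus 8 bytes of slack
 *	u64 v;
 *	int ret = bch2_varint_decode_fast(buf, buf + BUF_LEN, &v);
 *	// ret < 0: varint extends past the end of the buffer
 */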