X-Git-Url: http://mj.ucw.cz/gitweb/?a=blobdiff_plain;f=lib%2Fhashfunc.h;h=fc836b3848588a7945b26380caa65dbac4355580;hb=8c5b1e84628ab5927a20d9f5755826f5ed9cf990;hp=14133220faef80ef1b3d69dfbcdfacb3ca23aeef;hpb=49ed04e2e93a6a5b01058638224621d5c07db01c;p=libucw.git diff --git a/lib/hashfunc.h b/lib/hashfunc.h index 14133220..fc836b38 100644 --- a/lib/hashfunc.h +++ b/lib/hashfunc.h @@ -1,6 +1,6 @@ /* - * Hyper-super-meta-alt-control-shift extra fast str_len() and hash_*() - * routines + * UCW Library -- Hyper-super-meta-alt-control-shift extra fast + * str_len() and hash_*() routines * * (c) 2002, Robert Spalek * @@ -8,31 +8,36 @@ * of the GNU Lesser General Public License. */ -#ifndef _SHERLOCK_HASHFUNC_H -#define _SHERLOCK_HASHFUNC_H +#ifndef _UCW_HASHFUNC_H +#define _UCW_HASHFUNC_H #include "lib/lib.h" -/* An equivalent of the Intel's rol instruction. */ -#define ROL(x, bits) (((x) << (bits)) | ((x) >> (sizeof(uns)*8 - (bits)))) - /* The following functions need str to be aligned to uns. */ -uns str_len_aligned(const byte *str) CONST; -uns hash_string_aligned(const byte *str) CONST; -uns hash_block_aligned(const byte *str, uns len) CONST; +uns str_len_aligned(const byte *str) PURE; +uns hash_string_aligned(const byte *str) PURE; +uns hash_block_aligned(const byte *str, uns len) PURE; #ifdef CPU_ALLOW_UNALIGNED #define str_len(str) str_len_aligned(str) #define hash_string(str) hash_string_aligned(str) #define hash_block(str, len) hash_block_aligned(str, len) #else -uns str_len(const byte *str) CONST; -uns hash_string(const byte *str) CONST; -uns hash_block(const byte *str, uns len) CONST; +uns str_len(const byte *str) PURE; +uns hash_string(const byte *str) PURE; +uns hash_block(const byte *str, uns len) PURE; #endif -uns hash_string_nocase(const byte *str) CONST; +uns hash_string_nocase(const byte *str) PURE; -static inline uns CONST hash_int(uns x) { return 6442450967*x; } +/* + * We hash integers by multiplying by a reasonably large prime with + * few ones in its 
binary form (to give the compiler the possibility
+ * of using shifts and adds on architectures where multiplication
+ * instructions are slow).
+ */
+static inline uns CONST hash_u32(uns x) { return 0x01008041*x; }
+static inline uns CONST hash_u64(u64 x) { return hash_u32((uns)x ^ (uns)(x >> 32)); }
+static inline uns CONST hash_pointer(void *x) { return ((sizeof(x) <= 4) ? hash_u32((uns)(addr_int_t)x) : hash_u64((u64)(addr_int_t)x)); }
 #endif