"Jonathan Tan via GitGitGadget" <gitgitgadget@xxxxxxxxx> writes: > diff --git a/pack-objects.h b/pack-objects.h > index b9898a4e64b..15be8368d21 100644 > --- a/pack-objects.h > +++ b/pack-objects.h > @@ -207,6 +207,34 @@ static inline uint32_t pack_name_hash(const char *name) > return hash; > } > > +static inline uint32_t pack_name_hash_v2(const char *name) > +{ > + uint32_t hash = 0, base = 0, c; > + > + if (!name) > + return 0; > + > + while ((c = *name++)) { > + if (isspace(c)) > + continue; > + if (c == '/') { > + base = (base >> 6) ^ hash; > + hash = 0; > + } else { > + /* > + * 'c' is only a single byte. Reverse it and move > + * it to the top of the hash, moving the rest to > + * less-significant bits. > + */ > + c = (c & 0xF0) >> 4 | (c & 0x0F) << 4; > + c = (c & 0xCC) >> 2 | (c & 0x33) << 2; > + c = (c & 0xAA) >> 1 | (c & 0x55) << 1; > + hash = (hash >> 2) + (c << 24); > + } > + } > + return (base >> 6) ^ hash; > +} This works because `c` is masked before any arithmetic is performed on it, but the cast from potentially signed char to uint32_t still makes me nervous - if char is signed, it behaves as if it were first cast to int32_t and only then to uint32_t, as you can see from running the code below: #include <stdio.h> int main() { signed char c = -128; unsigned int u = c; printf("hello %u\n", u); return 0; } I would declare `c` as uint8_t or unsigned char. That would also make the `isspace(c)` call well-defined: passing a negative value other than EOF to the <ctype.h> functions is undefined behavior.