author     Victor Stinner <vstinner@python.org>    2020-05-12 16:46:20 (GMT)
committer  GitHub <noreply@github.com>             2020-05-12 16:46:20 (GMT)
commit     f453221c8b80e0570066a9375337f208d50e6406 (patch)
tree       9ea682c978b6a302388c4df1b8174d5188e0b2d2
parent     4c9ea093cd752a6687864674d34250653653f743 (diff)
bpo-40602: Add _Py_HashPointerRaw() function (GH-20056)
Add a new _Py_HashPointerRaw() function which avoids replacing -1 with -2, as a micro-optimization for hash tables keyed by pointers: it is now used by the _Py_hashtable_hash_ptr() hash function.
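For reference, here is a minimal standalone sketch of the two variants as the diff below defines them: the rotation spreads the mostly-zero low alignment bits of a pointer, and only the public variant remaps -1 to -2 because the CPython C API reserves a hash of -1 as an error indicator. The local names (hash_pointer_raw, hash_pointer), the type stand-ins, and the demo main() are assumptions for illustration, not CPython's actual build setup.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-ins for CPython's configure-time definitions (assumed here). */
typedef intptr_t Py_hash_t;
#define SIZEOF_VOID_P sizeof(void *)

/* Raw variant: rotate right by 4 so the low, usually-zero alignment bits
   do not cluster hash values; -1 is a possible (and valid) result. */
static Py_hash_t
hash_pointer_raw(const void *p)
{
    size_t y = (size_t)p;
    y = (y >> 4) | (y << (8 * SIZEOF_VOID_P - 4));
    return (Py_hash_t)y;
}

/* Public variant: same hash, but -1 is remapped to -2 because the
   Python C API uses a return value of -1 to signal a hashing error. */
static Py_hash_t
hash_pointer(const void *p)
{
    Py_hash_t x = hash_pointer_raw(p);
    if (x == -1) {
        x = -2;
    }
    return x;
}

int
main(void)
{
    int dummy;
    /* Only a pointer whose rotated bits are all ones hashes to -1; the
       synthetic value below exercises that remapping path (it is never
       dereferenced). */
    const void *all_ones = (const void *)(uintptr_t)-1;
    printf("stack object: raw=%" PRIdPTR " public=%" PRIdPTR "\n",
           (intptr_t)hash_pointer_raw(&dummy), (intptr_t)hash_pointer(&dummy));
    printf("all-ones ptr: raw=%" PRIdPTR " public=%" PRIdPTR "\n",
           (intptr_t)hash_pointer_raw(all_ones), (intptr_t)hash_pointer(all_ones));
    return 0;
}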
-rw-r--r--  Include/pyhash.h    |  2
-rw-r--r--  Python/hashtable.c  |  2
-rw-r--r--  Python/pyhash.c     | 14
3 files changed, 13 insertions, 5 deletions
diff --git a/Include/pyhash.h b/Include/pyhash.h
index 2f39858..4437b87 100644
--- a/Include/pyhash.h
+++ b/Include/pyhash.h
@@ -9,6 +9,8 @@ extern "C" {
 #ifndef Py_LIMITED_API
 PyAPI_FUNC(Py_hash_t) _Py_HashDouble(double);
 PyAPI_FUNC(Py_hash_t) _Py_HashPointer(const void*);
+// Similar to _Py_HashPointer(), but don't replace -1 with -2
+PyAPI_FUNC(Py_hash_t) _Py_HashPointerRaw(const void*);
 PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void*, Py_ssize_t);
 #endif
diff --git a/Python/hashtable.c b/Python/hashtable.c
index 1548c2e..90fe34e 100644
--- a/Python/hashtable.c
+++ b/Python/hashtable.c
@@ -109,7 +109,7 @@ _Py_hashtable_hash_ptr(struct _Py_hashtable_t *ht, const void *pkey)
 {
     void *key;
     _Py_HASHTABLE_READ_KEY(ht, pkey, key);
-    return (Py_uhash_t)_Py_HashPointer(key);
+    return (Py_uhash_t)_Py_HashPointerRaw(key);
 }
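The call site above is CPython's internal hash table, which only needs to reduce the hash to a bucket index and does not treat any hash value as an error sentinel, so skipping the -1 check costs nothing in correctness. A hypothetical illustration (not the real _Py_hashtable API) of why any Py_hash_t value, including -1, works there:

#include <stddef.h>
#include <stdint.h>

typedef intptr_t  Py_hash_t;
typedef uintptr_t Py_uhash_t;

/* Hypothetical helper, assumed for illustration only: an internal table
   just masks the hash down to a bucket index (num_buckets is assumed to
   be a power of two), so -1 is as usable a hash as any other value. */
static size_t
bucket_index(Py_hash_t raw_hash, size_t num_buckets)
{
    return (size_t)((Py_uhash_t)raw_hash & (num_buckets - 1));
}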
diff --git a/Python/pyhash.c b/Python/pyhash.c
index a6f42e7..3843079 100644
--- a/Python/pyhash.c
+++ b/Python/pyhash.c
@@ -129,16 +129,22 @@ _Py_HashDouble(double v)
 }

 Py_hash_t
-_Py_HashPointer(const void *p)
+_Py_HashPointerRaw(const void *p)
 {
-    Py_hash_t x;
     size_t y = (size_t)p;
     /* bottom 3 or 4 bits are likely to be 0; rotate y by 4 to avoid
        excessive hash collisions for dicts and sets */
     y = (y >> 4) | (y << (8 * SIZEOF_VOID_P - 4));
-    x = (Py_hash_t)y;
-    if (x == -1)
+    return (Py_hash_t)y;
+}
+
+Py_hash_t
+_Py_HashPointer(const void *p)
+{
+    Py_hash_t x = _Py_HashPointerRaw(p);
+    if (x == -1) {
         x = -2;
+    }
     return x;
 }
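As a side note on the untouched context lines above: the rotation exists because heap pointers are usually 8- or 16-byte aligned, so without it the low bits that dict/set bucket selection depends on would almost always be zero. A self-contained sketch (the local name and the malloc-based demo are assumptions for illustration) that prints the low 4 bits of a few allocations before and after the rotation:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define SIZEOF_VOID_P sizeof(void *)

/* Same rotation as _Py_HashPointerRaw(); the local name is illustrative. */
static size_t
rotate_pointer(const void *p)
{
    size_t y = (size_t)p;
    return (y >> 4) | (y << (8 * SIZEOF_VOID_P - 4));
}

int
main(void)
{
    /* Typical allocators return 16-byte aligned blocks, so the raw low
       bits are all zero; after the rotation they differ per allocation. */
    void *ptrs[4];
    for (int i = 0; i < 4; i++) {
        ptrs[i] = malloc(32);
        size_t raw = (size_t)ptrs[i];
        size_t rot = rotate_pointer(ptrs[i]);
        printf("ptr=%p  low4(raw)=%zu  low4(rotated)=%zu\n",
               ptrs[i], raw & 0xF, rot & 0xF);
    }
    for (int i = 0; i < 4; i++) {
        free(ptrs[i]);
    }
    return 0;
}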