-rw-r--r--    Include/cpython/object.h    1
-rw-r--r--    Include/internal/pycore_dict.h    2
-rw-r--r--    Include/internal/pycore_object.h    10
-rw-r--r--    Include/object.h    1
-rw-r--r--    Include/opcode.h    21
-rw-r--r--    Lib/opcode.py    5
-rw-r--r--    Lib/test/test_descr.py    10
-rw-r--r--    Lib/test/test_dict.py    5
-rw-r--r--    Lib/test/test_gc.py    29
-rw-r--r--    Lib/test/test_sys.py    10
-rw-r--r--    Misc/NEWS.d/next/Core and Builtins/2021-10-08-09-47-38.bpo-45340.ukHgDb.rst    3
-rw-r--r--    Objects/dictobject.c    434
-rw-r--r--    Objects/object.c    185
-rw-r--r--    Objects/typeobject.c    99
-rw-r--r--    Python/ceval.c    72
-rw-r--r--    Python/opcode_targets.h    18
-rw-r--r--    Python/specialize.c    141
-rwxr-xr-x    Tools/gdb/libpython.py    75
18 files changed, 721 insertions, 400 deletions
diff --git a/Include/cpython/object.h b/Include/cpython/object.h
index 5ae6f36..849d5aa 100644
--- a/Include/cpython/object.h
+++ b/Include/cpython/object.h
@@ -270,6 +270,7 @@ struct _typeobject {
destructor tp_finalize;
vectorcallfunc tp_vectorcall;
+ Py_ssize_t tp_inline_values_offset;
};
/* The *real* layout of a type object when allocated on the heap */
diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h
index d37ef71..13cb7cc 100644
--- a/Include/internal/pycore_dict.h
+++ b/Include/internal/pycore_dict.h
@@ -101,6 +101,8 @@ extern uint64_t _pydict_global_version;
#define DICT_NEXT_VERSION() (++_pydict_global_version)
+PyObject *_PyObject_MakeDictFromInstanceAttributes(PyObject *obj, PyDictValues *values);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index 82dddf1..3c126aa 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -181,6 +181,16 @@ extern int _Py_CheckSlotResult(
extern PyObject* _PyType_AllocNoTrack(PyTypeObject *type, Py_ssize_t nitems);
extern int _PyObject_InitializeDict(PyObject *obj);
+extern int _PyObject_StoreInstanceAttribute(PyObject *obj, PyDictValues *values,
+ PyObject *name, PyObject *value);
+PyObject * _PyObject_GetInstanceAttribute(PyObject *obj, PyDictValues *values,
+ PyObject *name);
+PyDictValues ** _PyObject_ValuesPointer(PyObject *);
+PyObject ** _PyObject_DictPointer(PyObject *);
+int _PyObject_VisitInstanceAttributes(PyObject *self, visitproc visit, void *arg);
+void _PyObject_ClearInstanceAttributes(PyObject *self);
+void _PyObject_FreeInstanceAttributes(PyObject *self);
+int _PyObject_IsInstanceDictEmpty(PyObject *);
#ifdef __cplusplus
}
diff --git a/Include/object.h b/Include/object.h
index c3062cb..7f050b8 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -333,6 +333,7 @@ given type object has a specified feature.
*/
#ifndef Py_LIMITED_API
+
/* Set if instances of the type object are treated as sequences for pattern matching */
#define Py_TPFLAGS_SEQUENCE (1 << 5)
/* Set if instances of the type object are treated as mappings for pattern matching */
diff --git a/Include/opcode.h b/Include/opcode.h
index 8817a4d..15f7226 100644
--- a/Include/opcode.h
+++ b/Include/opcode.h
@@ -147,7 +147,7 @@ extern "C" {
#define BINARY_SUBSCR_DICT 39
#define JUMP_ABSOLUTE_QUICK 40
#define LOAD_ATTR_ADAPTIVE 41
-#define LOAD_ATTR_SPLIT_KEYS 42
+#define LOAD_ATTR_INSTANCE_VALUE 42
#define LOAD_ATTR_WITH_HINT 43
#define LOAD_ATTR_SLOT 44
#define LOAD_ATTR_MODULE 45
@@ -158,15 +158,16 @@ extern "C" {
#define LOAD_METHOD_CACHED 80
#define LOAD_METHOD_CLASS 81
#define LOAD_METHOD_MODULE 87
-#define STORE_ATTR_ADAPTIVE 88
-#define STORE_ATTR_SPLIT_KEYS 120
-#define STORE_ATTR_SLOT 122
-#define STORE_ATTR_WITH_HINT 123
-#define LOAD_FAST__LOAD_FAST 127
-#define STORE_FAST__LOAD_FAST 128
-#define LOAD_FAST__LOAD_CONST 134
-#define LOAD_CONST__LOAD_FAST 140
-#define STORE_FAST__STORE_FAST 143
+#define LOAD_METHOD_NO_DICT 88
+#define STORE_ATTR_ADAPTIVE 120
+#define STORE_ATTR_INSTANCE_VALUE 122
+#define STORE_ATTR_SLOT 123
+#define STORE_ATTR_WITH_HINT 127
+#define LOAD_FAST__LOAD_FAST 128
+#define STORE_FAST__LOAD_FAST 134
+#define LOAD_FAST__LOAD_CONST 140
+#define LOAD_CONST__LOAD_FAST 143
+#define STORE_FAST__STORE_FAST 149
#define DO_TRACING 255
#ifdef NEED_OPCODE_JUMP_TABLES
static uint32_t _PyOpcode_RelativeJump[8] = {
diff --git a/Lib/opcode.py b/Lib/opcode.py
index 5d35674..efd6aef 100644
--- a/Lib/opcode.py
+++ b/Lib/opcode.py
@@ -231,7 +231,7 @@ _specialized_instructions = [
"BINARY_SUBSCR_DICT",
"JUMP_ABSOLUTE_QUICK",
"LOAD_ATTR_ADAPTIVE",
- "LOAD_ATTR_SPLIT_KEYS",
+ "LOAD_ATTR_INSTANCE_VALUE",
"LOAD_ATTR_WITH_HINT",
"LOAD_ATTR_SLOT",
"LOAD_ATTR_MODULE",
@@ -242,8 +242,9 @@ _specialized_instructions = [
"LOAD_METHOD_CACHED",
"LOAD_METHOD_CLASS",
"LOAD_METHOD_MODULE",
+ "LOAD_METHOD_NO_DICT",
"STORE_ATTR_ADAPTIVE",
- "STORE_ATTR_SPLIT_KEYS",
+ "STORE_ATTR_INSTANCE_VALUE",
"STORE_ATTR_SLOT",
"STORE_ATTR_WITH_HINT",
# Super instructions
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index af7848c..a5404b3 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -5500,17 +5500,19 @@ class SharedKeyTests(unittest.TestCase):
class B(A):
pass
+        # Shrink keys by repeatedly creating instances
+ [(A(), B()) for _ in range(20)]
+
a, b = A(), B()
self.assertEqual(sys.getsizeof(vars(a)), sys.getsizeof(vars(b)))
self.assertLess(sys.getsizeof(vars(a)), sys.getsizeof({"a":1}))
- # Initial hash table can contain at most 5 elements.
+ # Initial hash table can contain only one or two elements.
# Set 6 attributes to cause internal resizing.
a.x, a.y, a.z, a.w, a.v, a.u = range(6)
self.assertNotEqual(sys.getsizeof(vars(a)), sys.getsizeof(vars(b)))
a2 = A()
- self.assertEqual(sys.getsizeof(vars(a)), sys.getsizeof(vars(a2)))
- self.assertLess(sys.getsizeof(vars(a)), sys.getsizeof({"a":1}))
- b.u, b.v, b.w, b.t, b.s, b.r = range(6)
+ self.assertGreater(sys.getsizeof(vars(a)), sys.getsizeof(vars(a2)))
+ self.assertLess(sys.getsizeof(vars(a2)), sys.getsizeof({"a":1}))
self.assertLess(sys.getsizeof(vars(b)), sys.getsizeof({"a":1}))
diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py
index 4014375..b43c83a 100644
--- a/Lib/test/test_dict.py
+++ b/Lib/test/test_dict.py
@@ -994,8 +994,8 @@ class DictTest(unittest.TestCase):
@support.cpython_only
def test_splittable_setdefault(self):
- """split table must be combined when setdefault()
- breaks insertion order"""
+ """split table must keep correct insertion
+        order when attributes are added using setdefault()"""
a, b = self.make_shared_key_dict(2)
a['a'] = 1
@@ -1005,7 +1005,6 @@ class DictTest(unittest.TestCase):
size_b = sys.getsizeof(b)
b['a'] = 1
- self.assertGreater(size_b, size_a)
self.assertEqual(list(a), ['x', 'y', 'z', 'a', 'b'])
self.assertEqual(list(b), ['x', 'y', 'z', 'b', 'a'])
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index 6c28b2b..52948f1 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -444,7 +444,7 @@ class GCTests(unittest.TestCase):
# 0, thus mutating the trash graph as a side effect of merely asking
# whether __del__ exists. This used to (before 2.3b1) crash Python.
# Now __getattr__ isn't called.
- self.assertEqual(gc.collect(), 4)
+ self.assertEqual(gc.collect(), 2)
self.assertEqual(len(gc.garbage), garbagelen)
def test_boom2(self):
@@ -471,7 +471,7 @@ class GCTests(unittest.TestCase):
# there isn't a second time, so this simply cleans up the trash cycle.
# We expect a, b, a.__dict__ and b.__dict__ (4 objects) to get
# reclaimed this way.
- self.assertEqual(gc.collect(), 4)
+ self.assertEqual(gc.collect(), 2)
self.assertEqual(len(gc.garbage), garbagelen)
def test_boom_new(self):
@@ -491,7 +491,7 @@ class GCTests(unittest.TestCase):
gc.collect()
garbagelen = len(gc.garbage)
del a, b
- self.assertEqual(gc.collect(), 4)
+ self.assertEqual(gc.collect(), 2)
self.assertEqual(len(gc.garbage), garbagelen)
def test_boom2_new(self):
@@ -513,7 +513,7 @@ class GCTests(unittest.TestCase):
gc.collect()
garbagelen = len(gc.garbage)
del a, b
- self.assertEqual(gc.collect(), 4)
+ self.assertEqual(gc.collect(), 2)
self.assertEqual(len(gc.garbage), garbagelen)
def test_get_referents(self):
@@ -943,8 +943,8 @@ class GCTests(unittest.TestCase):
A()
t = gc.collect()
c, nc = getstats()
- self.assertEqual(t, 2*N) # instance object & its dict
- self.assertEqual(c - oldc, 2*N)
+ self.assertEqual(t, N) # instance objects
+ self.assertEqual(c - oldc, N)
self.assertEqual(nc - oldnc, 0)
# But Z() is not actually collected.
@@ -964,8 +964,8 @@ class GCTests(unittest.TestCase):
Z()
t = gc.collect()
c, nc = getstats()
- self.assertEqual(t, 2*N)
- self.assertEqual(c - oldc, 2*N)
+ self.assertEqual(t, N)
+ self.assertEqual(c - oldc, N)
self.assertEqual(nc - oldnc, 0)
# The A() trash should have been reclaimed already but the
@@ -974,8 +974,8 @@ class GCTests(unittest.TestCase):
zs.clear()
t = gc.collect()
c, nc = getstats()
- self.assertEqual(t, 4)
- self.assertEqual(c - oldc, 4)
+ self.assertEqual(t, 2)
+ self.assertEqual(c - oldc, 2)
self.assertEqual(nc - oldnc, 0)
gc.enable()
@@ -1128,8 +1128,7 @@ class GCCallbackTests(unittest.TestCase):
@cpython_only
def test_collect_garbage(self):
self.preclean()
- # Each of these cause four objects to be garbage: Two
- # Uncollectables and their instance dicts.
+        # Each of these causes two objects to be garbage: the two Uncollectables.
Uncollectable()
Uncollectable()
C1055820(666)
@@ -1138,8 +1137,8 @@ class GCCallbackTests(unittest.TestCase):
if v[1] != "stop":
continue
info = v[2]
- self.assertEqual(info["collected"], 2)
- self.assertEqual(info["uncollectable"], 8)
+ self.assertEqual(info["collected"], 1)
+ self.assertEqual(info["uncollectable"], 4)
# We should now have the Uncollectables in gc.garbage
self.assertEqual(len(gc.garbage), 4)
@@ -1156,7 +1155,7 @@ class GCCallbackTests(unittest.TestCase):
continue
info = v[2]
self.assertEqual(info["collected"], 0)
- self.assertEqual(info["uncollectable"], 4)
+ self.assertEqual(info["uncollectable"], 2)
# Uncollectables should be gone
self.assertEqual(len(gc.garbage), 0)
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 93e39bc..2ce40fc 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -1409,7 +1409,7 @@ class SizeofTest(unittest.TestCase):
check((1,2,3), vsize('') + 3*self.P)
# type
# static type: PyTypeObject
- fmt = 'P2nPI13Pl4Pn9Pn11PIPP'
+ fmt = 'P2nPI13Pl4Pn9Pn12PIPP'
s = vsize(fmt)
check(int, s)
# class
@@ -1422,15 +1422,15 @@ class SizeofTest(unittest.TestCase):
'5P')
class newstyleclass(object): pass
# Separate block for PyDictKeysObject with 8 keys and 5 entries
- check(newstyleclass, s + calcsize(DICT_KEY_STRUCT_FORMAT) + 8 + 5*calcsize("n2P"))
+ check(newstyleclass, s + calcsize(DICT_KEY_STRUCT_FORMAT) + 32 + 21*calcsize("n2P"))
# dict with shared keys
- check(newstyleclass().__dict__, size('nQ2P') + 5*self.P)
+ check(newstyleclass().__dict__, size('nQ2P') + 15*self.P)
o = newstyleclass()
o.a = o.b = o.c = o.d = o.e = o.f = o.g = o.h = 1
# Separate block for PyDictKeysObject with 16 keys and 10 entries
- check(newstyleclass, s + calcsize(DICT_KEY_STRUCT_FORMAT) + 16 + 10*calcsize("n2P"))
+ check(newstyleclass, s + calcsize(DICT_KEY_STRUCT_FORMAT) + 32 + 21*calcsize("n2P"))
# dict with shared keys
- check(newstyleclass().__dict__, size('nQ2P') + 10*self.P)
+ check(newstyleclass().__dict__, size('nQ2P') + 13*self.P)
# unicode
# each tuple contains a string and its expected character size
# don't put any static strings here, as they may contain
diff --git a/Misc/NEWS.d/next/Core and Builtins/2021-10-08-09-47-38.bpo-45340.ukHgDb.rst b/Misc/NEWS.d/next/Core and Builtins/2021-10-08-09-47-38.bpo-45340.ukHgDb.rst
new file mode 100644
index 0000000..7760773
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2021-10-08-09-47-38.bpo-45340.ukHgDb.rst
@@ -0,0 +1,3 @@
+Object attributes are held in an array instead of a dictionary. An object's
+dictionary is created lazily, only when needed. This reduces the memory
+consumption of a typical Python object by about 30%. Patch by Mark Shannon.
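A minimal sketch, assuming only documented Python behaviour, of what the entry above means for user code: attribute values live in a per-instance values array until __dict__ is actually requested, and the dict materialized at that point shares its keys with the class (the test_descr and test_sys changes above assert the resulting size differences).

    import sys

    class Point:
        def __init__(self, x, y):
            self.x = x          # held in the inline values array, no dict yet
            self.y = y

    p = Point(1, 2)
    d = vars(p)                 # __dict__ is materialized lazily, on first request
    print(d)                    # {'x': 1, 'y': 2}
    # The instance dict shares its keys with the class, so it is typically
    # smaller than an equivalent standalone dict:
    print(sys.getsizeof(d), sys.getsizeof({'x': 1, 'y': 2}))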
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index 60470bf..3d6e4c1 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -634,7 +634,7 @@ new_values(Py_ssize_t size)
/* Consumes a reference to the keys object */
static PyObject *
-new_dict(PyDictKeysObject *keys, PyDictValues *values)
+new_dict(PyDictKeysObject *keys, PyDictValues *values, Py_ssize_t used, int free_values_on_failure)
{
PyDictObject *mp;
assert(keys != NULL);
@@ -653,7 +653,7 @@ new_dict(PyDictKeysObject *keys, PyDictValues *values)
mp = PyObject_GC_New(PyDictObject, &PyDict_Type);
if (mp == NULL) {
dictkeys_decref(keys);
- if (values != empty_values) {
+ if (free_values_on_failure) {
free_values(values);
}
return NULL;
@@ -661,12 +661,18 @@ new_dict(PyDictKeysObject *keys, PyDictValues *values)
}
mp->ma_keys = keys;
mp->ma_values = values;
- mp->ma_used = 0;
+ mp->ma_used = used;
mp->ma_version_tag = DICT_NEXT_VERSION();
ASSERT_CONSISTENT(mp);
return (PyObject *)mp;
}
+static inline Py_ssize_t
+shared_keys_usable_size(PyDictKeysObject *keys)
+{
+ return keys->dk_nentries + keys->dk_usable;
+}
+
/* Consumes a reference to the keys object */
static PyObject *
new_dict_with_shared_keys(PyDictKeysObject *keys)
@@ -674,7 +680,7 @@ new_dict_with_shared_keys(PyDictKeysObject *keys)
PyDictValues *values;
Py_ssize_t i, size;
- size = USABLE_FRACTION(DK_SIZE(keys));
+ size = shared_keys_usable_size(keys);
values = new_values(size);
if (values == NULL) {
dictkeys_decref(keys);
@@ -684,7 +690,7 @@ new_dict_with_shared_keys(PyDictKeysObject *keys)
for (i = 0; i < size; i++) {
values->values[i] = NULL;
}
- return new_dict(keys, values);
+ return new_dict(keys, values, 0, 1);
}
@@ -733,7 +739,7 @@ PyObject *
PyDict_New(void)
{
dictkeys_incref(Py_EMPTY_KEYS);
- return new_dict(Py_EMPTY_KEYS, empty_values);
+ return new_dict(Py_EMPTY_KEYS, empty_values, 0, 0);
}
/* Search index of hash table from offset of entry table */
@@ -998,6 +1004,40 @@ insertion_resize(PyDictObject *mp)
return dictresize(mp, calculate_log2_keysize(GROWTH_RATE(mp)));
}
+static int
+insert_into_dictkeys(PyDictKeysObject *keys, PyObject *name)
+{
+ assert(PyUnicode_CheckExact(name));
+ Py_hash_t hash = ((PyASCIIObject *)name)->hash;
+ if (hash == -1) {
+ hash = PyUnicode_Type.tp_hash(name);
+ if (hash == -1) {
+ PyErr_Clear();
+ return DKIX_EMPTY;
+ }
+ }
+ Py_ssize_t ix = dictkeys_stringlookup(keys, name, hash);
+ if (ix == DKIX_EMPTY) {
+ if (keys->dk_usable <= 0) {
+ return DKIX_EMPTY;
+ }
+ Py_INCREF(name);
+ /* Insert into new slot. */
+ keys->dk_version = 0;
+ Py_ssize_t hashpos = find_empty_slot(keys, hash);
+ ix = keys->dk_nentries;
+ PyDictKeyEntry *ep = &DK_ENTRIES(keys)[ix];
+ dictkeys_set_index(keys, hashpos, ix);
+ assert(ep->me_key == NULL);
+ ep->me_key = name;
+ ep->me_hash = hash;
+ keys->dk_usable--;
+ keys->dk_nentries++;
+ }
+ assert (ix < SHARED_KEYS_MAX_SIZE);
+ return (int)ix;
+}
+
/*
Internal routine to insert a new item into the table.
Used both by the internal resize routine and by the public insert routine.
@@ -1043,7 +1083,7 @@ insertdict(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value)
Py_ssize_t index = mp->ma_keys->dk_nentries;
assert(index < SHARED_KEYS_MAX_SIZE);
assert((mp->ma_values->mv_order >> 60) == 0);
- mp->ma_values->mv_order = (mp->ma_values->mv_order)<<4 | index;
+ mp->ma_values->mv_order = ((mp->ma_values->mv_order)<<4) | index;
assert (mp->ma_values->values[index] == NULL);
mp->ma_values->values[index] = value;
}
@@ -1144,8 +1184,7 @@ actually be smaller than the old one.
If a table is split (its keys and hashes are shared, its values are not),
then the values are temporarily copied into the table, it is resized as
a combined table, then the me_value slots in the old table are NULLed out.
-After resizing a table is always combined,
-but can be resplit by make_keys_shared().
+After resizing a table is always combined.
*/
static int
dictresize(PyDictObject *mp, uint8_t log2_newsize)
@@ -1186,19 +1225,16 @@ dictresize(PyDictObject *mp, uint8_t log2_newsize)
if (oldvalues != NULL) {
/* Convert split table into new combined table.
* We must incref keys; we can transfer values.
- * Note that values of split table is always dense.
*/
for (Py_ssize_t i = 0; i < numentries; i++) {
- int index = oldvalues->mv_order >> ((numentries-1-i)*4) & 15;
- assert(oldvalues->values[index] != NULL);
+ int index = get_index_from_order(mp, i);
PyDictKeyEntry *ep = &oldentries[index];
- PyObject *key = ep->me_key;
- Py_INCREF(key);
- newentries[i].me_key = key;
+ assert(oldvalues->values[index] != NULL);
+ Py_INCREF(ep->me_key);
+ newentries[i].me_key = ep->me_key;
newentries[i].me_hash = ep->me_hash;
newentries[i].me_value = oldvalues->values[index];
}
-
dictkeys_decref(oldkeys);
mp->ma_values = NULL;
if (oldvalues != empty_values) {
@@ -1241,69 +1277,8 @@ dictresize(PyDictObject *mp, uint8_t log2_newsize)
build_indices(mp->ma_keys, newentries, numentries);
mp->ma_keys->dk_usable -= numentries;
mp->ma_keys->dk_nentries = numentries;
- return 0;
-}
-
-/* Returns NULL if unable to split table.
- * A NULL return does not necessarily indicate an error */
-static PyDictKeysObject *
-make_keys_shared(PyObject *op)
-{
- Py_ssize_t i;
- Py_ssize_t size;
- PyDictObject *mp = (PyDictObject *)op;
-
- if (!PyDict_CheckExact(op))
- return NULL;
- if (mp->ma_used > SHARED_KEYS_MAX_SIZE) {
- return NULL;
- }
- if (!_PyDict_HasSplitTable(mp)) {
- PyDictKeyEntry *ep0;
- PyDictValues *values;
- assert(mp->ma_keys->dk_refcnt == 1);
- if (mp->ma_keys->dk_kind == DICT_KEYS_GENERAL) {
- return NULL;
- }
- else if (mp->ma_used > mp->ma_keys->dk_nentries) {
- /* Remove dummy keys */
- if (dictresize(mp, DK_LOG_SIZE(mp->ma_keys)))
- return NULL;
- }
- assert(mp->ma_used == mp->ma_keys->dk_nentries);
- /* Copy values into a new array */
- ep0 = DK_ENTRIES(mp->ma_keys);
- size = USABLE_FRACTION(DK_SIZE(mp->ma_keys));
- values = new_values(size);
- if (values == NULL) {
- PyErr_SetString(PyExc_MemoryError,
- "Not enough memory to allocate new values array");
- return NULL;
- }
- uint64_t order = 0;
- for (i = 0; i < mp->ma_used; i++) {
- order <<= 4;
- order |= i;
- assert(ep0[i].me_value != NULL);
- values->values[i] = ep0[i].me_value;
- ep0[i].me_value = NULL;
- }
- values->mv_order = order;
- for (; i < size; i++) {
- assert(ep0[i].me_value == NULL);
- values->values[i] = NULL;
- ep0[i].me_value = NULL;
- }
- if (mp->ma_keys->dk_nentries + mp->ma_keys->dk_usable > SHARED_KEYS_MAX_SIZE) {
- assert(mp->ma_keys->dk_nentries <= SHARED_KEYS_MAX_SIZE);
- mp->ma_keys->dk_usable = SHARED_KEYS_MAX_SIZE - mp->ma_keys->dk_nentries;
- }
- mp->ma_keys->dk_kind = DICT_KEYS_SPLIT;
- mp->ma_values = values;
- }
- dictkeys_incref(mp->ma_keys);
ASSERT_CONSISTENT(mp);
- return mp->ma_keys;
+ return 0;
}
PyObject *
@@ -1331,7 +1306,7 @@ _PyDict_NewPresized(Py_ssize_t minused)
new_keys = new_keys_object(log2_newsize);
if (new_keys == NULL)
return NULL;
- return new_dict(new_keys, NULL);
+ return new_dict(new_keys, NULL, 0, 0);
}
/* Note that, for historical reasons, PyDict_GetItem() suppresses all errors
@@ -1503,6 +1478,9 @@ _PyDict_GetItemStringWithError(PyObject *v, const char *key)
/* Fast version of global value lookup (LOAD_GLOBAL).
* Lookup in globals, then builtins.
*
+ *
+ *
+ *
* Raise an exception and return NULL if an error occurred (ex: computing the
* key hash failed, key comparison failed, ...). Return NULL if the key doesn't
* exist. Return the value if the key exists.
@@ -1590,6 +1568,21 @@ _PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value,
return insertdict(mp, key, hash, value);
}
+static uint64_t
+delete_index_from_order(uint64_t order, Py_ssize_t ix)
+{ /* Update order */
+ for (int i = 0;; i+= 4) {
+ assert (i < 64);
+ if (((order >> i) & 15) == (uint64_t)ix) {
+ /* Remove 4 bits at ith position */
+ uint64_t high = ((order>>i)>>4)<<i;
+ uint64_t low = order & ((((uint64_t)1)<<i)-1);
+ return high | low;
+ }
+ }
+ Py_UNREACHABLE();
+}
+
static int
delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix,
PyObject *old_value)
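The delete_index_from_order() helper factored out above manipulates mv_order, which packs the insertion order of a split table's values four bits per entry index, newest entry in the low nibble, so at most 16 entries fit in the 64-bit field. A pure-Python model of the same bit trick, illustrative only and not a CPython API:

    def delete_index_from_order(order: int, ix: int) -> int:
        i = 0
        while True:
            assert i < 64
            if (order >> i) & 15 == ix:
                high = ((order >> i) >> 4) << i   # drop the matching nibble
                low = order & ((1 << i) - 1)      # keep the nibbles below it
                return high | low
            i += 4

    order = 0
    for ix in (0, 1, 2):              # entries 0, 1, 2 inserted in that order
        order = (order << 4) | ix     # mirrors insertdict()/STORE_ATTR above
    assert order == 0x012
    assert delete_index_from_order(order, 1) == 0x02   # entry 1 removed, order kept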
@@ -1601,7 +1594,6 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix,
assert(hashpos >= 0);
mp->ma_used--;
- mp->ma_keys->dk_version = 0;
mp->ma_version_tag = DICT_NEXT_VERSION();
ep = &DK_ENTRIES(mp->ma_keys)[ix];
if (mp->ma_values) {
@@ -1609,19 +1601,12 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix,
mp->ma_values->values[ix] = NULL;
assert(ix < SHARED_KEYS_MAX_SIZE);
/* Update order */
- for (int i = 0;; i+= 4) {
- assert (i < 64);
- if (((mp->ma_values->mv_order >> i) & 15) == (uint64_t)ix) {
- /* Remove 4 bits at ith position */
- uint64_t order = mp->ma_values->mv_order;
- uint64_t high = ((order>>i)>>4)<<i;
- uint64_t low = order & ((((uint64_t)1)<<i)-1);
- mp->ma_values->mv_order = high | low;
- break;
- }
- }
+ mp->ma_values->mv_order =
+ delete_index_from_order(mp->ma_values->mv_order, ix);
+ ASSERT_CONSISTENT(mp);
}
else {
+ mp->ma_keys->dk_version = 0;
dictkeys_set_index(mp->ma_keys, hashpos, DKIX_DUMMY);
old_key = ep->me_key;
ep->me_key = NULL;
@@ -2692,7 +2677,7 @@ PyDict_Copy(PyObject *o)
if (_PyDict_HasSplitTable(mp)) {
PyDictObject *split_copy;
- Py_ssize_t size = USABLE_FRACTION(DK_SIZE(mp->ma_keys));
+ Py_ssize_t size = shared_keys_usable_size(mp->ma_keys);
PyDictValues *newvalues;
newvalues = new_values(size);
if (newvalues == NULL)
@@ -2740,7 +2725,7 @@ PyDict_Copy(PyObject *o)
if (keys == NULL) {
return NULL;
}
- PyDictObject *new = (PyDictObject *)new_dict(keys, NULL);
+ PyDictObject *new = (PyDictObject *)new_dict(keys, NULL, 0, 0);
if (new == NULL) {
/* In case of an error, `new_dict()` takes care of
cleaning up `keys`. */
@@ -2979,15 +2964,6 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj)
if (ix == DKIX_ERROR)
return NULL;
- if (_PyDict_HasSplitTable(mp) &&
- ((ix >= 0 && value == NULL && mp->ma_used != ix) ||
- (ix == DKIX_EMPTY && mp->ma_used != mp->ma_keys->dk_nentries))) {
- if (insertion_resize(mp) < 0) {
- return NULL;
- }
- ix = DKIX_EMPTY;
- }
-
if (ix == DKIX_EMPTY) {
mp->ma_keys->dk_version = 0;
PyDictKeyEntry *ep, *ep0;
@@ -3028,7 +3004,7 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj)
else if (value == NULL) {
value = defaultobj;
assert(_PyDict_HasSplitTable(mp));
- assert(ix == mp->ma_used);
+ assert(mp->ma_values->values[ix] == NULL);
Py_INCREF(value);
MAINTAIN_TRACKING(mp, key, value);
mp->ma_values->values[ix] = value;
@@ -3204,20 +3180,22 @@ static PyObject *dictiter_new(PyDictObject *, PyTypeObject *);
Py_ssize_t
_PyDict_SizeOf(PyDictObject *mp)
{
- Py_ssize_t size, usable, res;
+ Py_ssize_t size, res;
size = DK_SIZE(mp->ma_keys);
- usable = USABLE_FRACTION(size);
res = _PyObject_SIZE(Py_TYPE(mp));
- if (mp->ma_values)
- res += usable * sizeof(PyObject*);
+ if (mp->ma_values) {
+ res += shared_keys_usable_size(mp->ma_keys) * sizeof(PyObject*);
+ }
/* If the dictionary is split, the keys portion is accounted-for
in the type object. */
- if (mp->ma_keys->dk_refcnt == 1)
+ if (mp->ma_keys->dk_refcnt == 1) {
+ Py_ssize_t usable = USABLE_FRACTION(size);
res += (sizeof(PyDictKeysObject)
+ DK_IXSIZE(mp->ma_keys) * size
+ sizeof(PyDictKeyEntry) * usable);
+ }
return res;
}
@@ -4919,11 +4897,14 @@ dictvalues_reversed(_PyDictViewObject *dv, PyObject *Py_UNUSED(ignored))
PyDictKeysObject *
_PyDict_NewKeysForClass(void)
{
- PyDictKeysObject *keys = new_keys_object(PyDict_LOG_MINSIZE);
+ PyDictKeysObject *keys = new_keys_object(5); /* log2(32) */
if (keys == NULL) {
PyErr_Clear();
}
else {
+ assert(keys->dk_nentries == 0);
+ /* Set to max size+1 as it will shrink by one before each new object */
+ keys->dk_usable = SHARED_KEYS_MAX_SIZE;
keys->dk_kind = DICT_KEYS_SPLIT;
}
return keys;
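A rough model, illustrative only, of the sizing trick set up here and consumed by init_inline_values() in the hunk below: the class's shared keys start with dk_usable at SHARED_KEYS_MAX_SIZE, each new instance decrements it (bottoming out at 1), so the values arrays pre-allocated for later instances shrink toward the number of attributes the class actually uses (the "Shrink keys by repeatedly creating instances" change in test_descr above relies on this).

    class SharedKeys:
        # Toy stand-in for PyDictKeysObject; field names mirror the C struct.
        def __init__(self, max_usable):
            self.dk_nentries = 0          # attribute names inserted so far
            self.dk_usable = max_usable   # starts at SHARED_KEYS_MAX_SIZE

        def usable_size(self):            # shared_keys_usable_size()
            return self.dk_nentries + self.dk_usable

    def new_instance_values(keys):        # what init_inline_values() allocates
        if keys.dk_usable > 1:
            keys.dk_usable -= 1
        return [None] * keys.usable_size()

    keys = SharedKeys(max_usable=30)      # placeholder value, not the real constant
    sizes = [len(new_instance_values(keys)) for _ in range(40)]
    print(sizes[0], sizes[-1])            # e.g. 29 ... 1: later instances get smaller arrays

In the real implementation, storing attributes bumps dk_nentries via insert_into_dictkeys(), so the usable size settles near the number of attributes actually used rather than shrinking all the way to one.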
@@ -4931,15 +4912,42 @@ _PyDict_NewKeysForClass(void)
#define CACHED_KEYS(tp) (((PyHeapTypeObject*)tp)->ht_cached_keys)
+static int
+init_inline_values(PyObject *obj, PyTypeObject *tp)
+{
+ assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE);
+ assert(tp->tp_dictoffset > 0);
+ assert(tp->tp_inline_values_offset > 0);
+ PyDictKeysObject *keys = CACHED_KEYS(tp);
+ assert(keys != NULL);
+ if (keys->dk_usable > 1) {
+ keys->dk_usable--;
+ }
+ Py_ssize_t size = shared_keys_usable_size(keys);
+ assert(size > 0);
+ PyDictValues *values = new_values(size);
+ if (values == NULL) {
+ PyErr_NoMemory();
+ return -1;
+ }
+ values->mv_order = 0;
+ for (int i = 0; i < size; i++) {
+ values->values[i] = NULL;
+ }
+ *((PyDictValues **)((char *)obj + tp->tp_inline_values_offset)) = values;
+ return 0;
+}
+
int
_PyObject_InitializeDict(PyObject *obj)
{
- PyObject **dictptr = _PyObject_GetDictPtr(obj);
- if (dictptr == NULL) {
+ PyTypeObject *tp = Py_TYPE(obj);
+ if (tp->tp_dictoffset == 0) {
return 0;
}
- assert(*dictptr == NULL);
- PyTypeObject *tp = Py_TYPE(obj);
+ if (tp->tp_inline_values_offset) {
+ return init_inline_values(obj, tp);
+ }
PyObject *dict;
if (_PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE) && CACHED_KEYS(tp)) {
dictkeys_incref(CACHED_KEYS(tp));
@@ -4951,15 +4959,174 @@ _PyObject_InitializeDict(PyObject *obj)
if (dict == NULL) {
return -1;
}
+ PyObject **dictptr = _PyObject_DictPointer(obj);
*dictptr = dict;
return 0;
}
+static PyObject *
+make_dict_from_instance_attributes(PyDictKeysObject *keys, PyDictValues *values)
+{
+ dictkeys_incref(keys);
+ Py_ssize_t used = 0;
+ Py_ssize_t track = 0;
+ for (Py_ssize_t i = 0; i < shared_keys_usable_size(keys); i++) {
+ PyObject *val = values->values[i];
+ if (val != NULL) {
+ used += 1;
+ track += _PyObject_GC_MAY_BE_TRACKED(val);
+ }
+ }
+ PyObject *res = new_dict(keys, values, used, 0);
+ if (track && res) {
+ _PyObject_GC_TRACK(res);
+ }
+ return res;
+}
+
+PyObject *
+_PyObject_MakeDictFromInstanceAttributes(PyObject *obj, PyDictValues *values)
+{
+ assert(Py_TYPE(obj)->tp_inline_values_offset != 0);
+ PyDictKeysObject *keys = CACHED_KEYS(Py_TYPE(obj));
+ return make_dict_from_instance_attributes(keys, values);
+}
+
+int
+_PyObject_StoreInstanceAttribute(PyObject *obj, PyDictValues *values,
+ PyObject *name, PyObject *value)
+{
+ assert(PyUnicode_CheckExact(name));
+ PyTypeObject *tp = Py_TYPE(obj);
+ PyDictKeysObject *keys = CACHED_KEYS(Py_TYPE(obj));
+ assert(keys != NULL);
+ assert(values != NULL);
+ int ix = insert_into_dictkeys(keys, name);
+ if (ix == DKIX_EMPTY) {
+ if (value == NULL) {
+ PyErr_SetObject(PyExc_AttributeError, name);
+ return -1;
+ }
+ PyObject *dict = make_dict_from_instance_attributes(keys, values);
+ if (dict == NULL) {
+ return -1;
+ }
+ *((PyDictValues **)((char *)obj + tp->tp_inline_values_offset)) = NULL;
+ *((PyObject **) ((char *)obj + tp->tp_dictoffset)) = dict;
+ return PyDict_SetItem(dict, name, value);
+ }
+ PyObject *old_value = values->values[ix];
+ Py_XINCREF(value);
+ values->values[ix] = value;
+ if (old_value == NULL) {
+ if (value == NULL) {
+ PyErr_SetObject(PyExc_AttributeError, name);
+ return -1;
+ }
+ values->mv_order = (values->mv_order << 4) | ix;
+ }
+ else {
+ if (value == NULL) {
+ values->mv_order = delete_index_from_order(values->mv_order, ix);
+ }
+ Py_DECREF(old_value);
+ }
+ return 0;
+}
+
+PyObject *
+_PyObject_GetInstanceAttribute(PyObject *obj, PyDictValues *values,
+ PyObject *name)
+{
+ assert(PyUnicode_CheckExact(name));
+ PyDictKeysObject *keys = CACHED_KEYS(Py_TYPE(obj));
+ assert(keys != NULL);
+ Py_ssize_t ix = _PyDictKeys_StringLookup(keys, name);
+ if (ix == DKIX_EMPTY) {
+ return NULL;
+ }
+ PyObject *value = values->values[ix];
+ Py_XINCREF(value);
+ return value;
+}
+
+int
+_PyObject_IsInstanceDictEmpty(PyObject *obj)
+{
+ PyTypeObject *tp = Py_TYPE(obj);
+ if (tp->tp_dictoffset == 0) {
+ return 1;
+ }
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(obj);
+ if (values_ptr && *values_ptr) {
+ PyDictKeysObject *keys = CACHED_KEYS(tp);
+ for (Py_ssize_t i = 0; i < keys->dk_nentries; i++) {
+ if ((*values_ptr)->values[i] != NULL) {
+ return 0;
+ }
+ }
+ return 1;
+ }
+ PyObject **dictptr = _PyObject_DictPointer(obj);
+ PyObject *dict = *dictptr;
+ if (dict == NULL) {
+ return 1;
+ }
+ return ((PyDictObject *)dict)->ma_used == 0;
+}
+
+
+int
+_PyObject_VisitInstanceAttributes(PyObject *self, visitproc visit, void *arg)
+{
+ PyTypeObject *tp = Py_TYPE(self);
+ assert(tp->tp_inline_values_offset);
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(self);
+ if (*values_ptr == NULL) {
+ return 0;
+ }
+ PyDictKeysObject *keys = CACHED_KEYS(tp);
+ for (Py_ssize_t i = 0; i < keys->dk_nentries; i++) {
+ Py_VISIT((*values_ptr)->values[i]);
+ }
+ return 0;
+}
+
+void
+_PyObject_ClearInstanceAttributes(PyObject *self)
+{
+ PyTypeObject *tp = Py_TYPE(self);
+ assert(tp->tp_inline_values_offset);
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(self);
+ if (*values_ptr == NULL) {
+ return;
+ }
+ PyDictKeysObject *keys = CACHED_KEYS(tp);
+ for (Py_ssize_t i = 0; i < keys->dk_nentries; i++) {
+ Py_CLEAR((*values_ptr)->values[i]);
+ }
+}
+
+void
+_PyObject_FreeInstanceAttributes(PyObject *self)
+{
+ PyTypeObject *tp = Py_TYPE(self);
+ assert(tp->tp_inline_values_offset);
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(self);
+ if (*values_ptr == NULL) {
+ return;
+ }
+ PyDictKeysObject *keys = CACHED_KEYS(tp);
+ for (Py_ssize_t i = 0; i < keys->dk_nentries; i++) {
+ Py_XDECREF((*values_ptr)->values[i]);
+ }
+ free_values(*values_ptr);
+}
PyObject *
PyObject_GenericGetDict(PyObject *obj, void *context)
{
- PyObject **dictptr = _PyObject_GetDictPtr(obj);
+ PyObject **dictptr = _PyObject_DictPointer(obj);
if (dictptr == NULL) {
PyErr_SetString(PyExc_AttributeError,
"This object has no __dict__");
@@ -4968,7 +5135,14 @@ PyObject_GenericGetDict(PyObject *obj, void *context)
PyObject *dict = *dictptr;
if (dict == NULL) {
PyTypeObject *tp = Py_TYPE(obj);
- if (_PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE) && CACHED_KEYS(tp)) {
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(obj);
+ if (values_ptr && *values_ptr) {
+ *dictptr = dict = make_dict_from_instance_attributes(CACHED_KEYS(tp), *values_ptr);
+ if (dict != NULL) {
+ *values_ptr = NULL;
+ }
+ }
+ else if (_PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE) && CACHED_KEYS(tp)) {
dictkeys_incref(CACHED_KEYS(tp));
*dictptr = dict = new_dict_with_shared_keys(CACHED_KEYS(tp));
}
@@ -5003,37 +5177,7 @@ _PyObjectDict_SetItem(PyTypeObject *tp, PyObject **dictptr,
res = PyDict_DelItem(dict, key);
}
else {
- int was_shared = (cached == ((PyDictObject *)dict)->ma_keys);
res = PyDict_SetItem(dict, key, value);
- if (was_shared &&
- (cached = CACHED_KEYS(tp)) != NULL &&
- cached != ((PyDictObject *)dict)->ma_keys &&
- cached->dk_nentries <= SHARED_KEYS_MAX_SIZE) {
- /* PyDict_SetItem() may call dictresize and convert split table
- * into combined table. In such case, convert it to split
- * table again and update type's shared key only when this is
- * the only dict sharing key with the type.
- *
- * This is to allow using shared key in class like this:
- *
- * class C:
- * def __init__(self):
- * # one dict resize happens
- * self.a, self.b, self.c = 1, 2, 3
- * self.d, self.e, self.f = 4, 5, 6
- * a = C()
- */
- if (cached->dk_refcnt == 1) {
- PyDictKeysObject *new_cached = make_keys_shared(dict);
- if (new_cached != NULL) {
- CACHED_KEYS(tp) = new_cached;
- dictkeys_decref(cached);
- }
- else if (PyErr_Occurred()) {
- return -1;
- }
- }
- }
}
} else {
dict = *dictptr;
diff --git a/Objects/object.c b/Objects/object.c
index 9d48346..14c85c2 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -5,6 +5,7 @@
#include "pycore_call.h" // _PyObject_CallNoArgs()
#include "pycore_ceval.h" // _Py_EnterRecursiveCall()
#include "pycore_context.h"
+#include "pycore_dict.h"
#include "pycore_initconfig.h" // _PyStatus_EXCEPTION()
#include "pycore_object.h" // _PyType_CheckConsistency()
#include "pycore_pyerrors.h" // _PyErr_Occurred()
@@ -1065,10 +1066,8 @@ PyObject_SetAttr(PyObject *v, PyObject *name, PyObject *value)
return -1;
}
-/* Helper to get a pointer to an object's __dict__ slot, if any */
-
PyObject **
-_PyObject_GetDictPtr(PyObject *obj)
+_PyObject_DictPointer(PyObject *obj)
{
Py_ssize_t dictoffset;
PyTypeObject *tp = Py_TYPE(obj);
@@ -1090,6 +1089,35 @@ _PyObject_GetDictPtr(PyObject *obj)
return (PyObject **) ((char *)obj + dictoffset);
}
+/* Helper to get a pointer to an object's __dict__ slot, if any.
+ * Creates the dict from inline attributes if necessary.
+ * Does not set an exception. */
+PyObject **
+_PyObject_GetDictPtr(PyObject *obj)
+{
+ PyObject **dict_ptr = _PyObject_DictPointer(obj);
+ if (dict_ptr == NULL) {
+ return NULL;
+ }
+ if (*dict_ptr != NULL) {
+ return dict_ptr;
+ }
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(obj);
+ if (values_ptr == NULL || *values_ptr == NULL) {
+ return dict_ptr;
+ }
+ PyObject *dict = _PyObject_MakeDictFromInstanceAttributes(obj, *values_ptr);
+ if (dict == NULL) {
+ PyErr_Clear();
+ return NULL;
+ }
+ assert(*dict_ptr == NULL);
+ assert(*values_ptr != NULL);
+ *values_ptr = NULL;
+ *dict_ptr = dict;
+ return dict_ptr;
+}
+
PyObject *
PyObject_SelfIter(PyObject *obj)
{
@@ -1136,7 +1164,7 @@ _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method)
}
}
- if (tp->tp_getattro != PyObject_GenericGetAttr || !PyUnicode_Check(name)) {
+ if (tp->tp_getattro != PyObject_GenericGetAttr || !PyUnicode_CheckExact(name)) {
*method = PyObject_GetAttr(obj, name);
return 0;
}
@@ -1156,23 +1184,34 @@ _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method)
}
}
}
-
- PyObject **dictptr = _PyObject_GetDictPtr(obj);
- PyObject *dict;
- if (dictptr != NULL && (dict = *dictptr) != NULL) {
- Py_INCREF(dict);
- PyObject *attr = PyDict_GetItemWithError(dict, name);
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(obj);
+ if (values_ptr && *values_ptr) {
+ assert(*_PyObject_DictPointer(obj) == NULL);
+ PyObject *attr = _PyObject_GetInstanceAttribute(obj, *values_ptr, name);
if (attr != NULL) {
- *method = Py_NewRef(attr);
- Py_DECREF(dict);
+ *method = attr;
Py_XDECREF(descr);
return 0;
}
- Py_DECREF(dict);
+ }
+ else {
+ PyObject **dictptr = _PyObject_DictPointer(obj);
+ PyObject *dict;
+ if (dictptr != NULL && (dict = *dictptr) != NULL) {
+ Py_INCREF(dict);
+ PyObject *attr = PyDict_GetItemWithError(dict, name);
+ if (attr != NULL) {
+ *method = Py_NewRef(attr);
+ Py_DECREF(dict);
+ Py_XDECREF(descr);
+ return 0;
+ }
+ Py_DECREF(dict);
- if (PyErr_Occurred()) {
- Py_XDECREF(descr);
- return 0;
+ if (PyErr_Occurred()) {
+ Py_XDECREF(descr);
+ return 0;
+ }
}
}
@@ -1200,6 +1239,17 @@ _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method)
return 0;
}
+PyDictValues **
+_PyObject_ValuesPointer(PyObject *obj)
+{
+ PyTypeObject *tp = Py_TYPE(obj);
+ Py_ssize_t offset = tp->tp_inline_values_offset;
+ if (offset == 0) {
+ return NULL;
+ }
+ return (PyDictValues **) ((char *)obj + offset);
+}
+
/* Generic GetAttr functions - put these in your tp_[gs]etattro slot. */
PyObject *
@@ -1247,25 +1297,46 @@ _PyObject_GenericGetAttrWithDict(PyObject *obj, PyObject *name,
goto done;
}
}
-
if (dict == NULL) {
- /* Inline _PyObject_GetDictPtr */
- dictoffset = tp->tp_dictoffset;
- if (dictoffset != 0) {
- if (dictoffset < 0) {
- Py_ssize_t tsize = Py_SIZE(obj);
- if (tsize < 0) {
- tsize = -tsize;
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(obj);
+ if (values_ptr && *values_ptr) {
+ if (PyUnicode_CheckExact(name)) {
+ assert(*_PyObject_DictPointer(obj) == NULL);
+ res = _PyObject_GetInstanceAttribute(obj, *values_ptr, name);
+ if (res != NULL) {
+ goto done;
}
- size_t size = _PyObject_VAR_SIZE(tp, tsize);
- _PyObject_ASSERT(obj, size <= PY_SSIZE_T_MAX);
-
- dictoffset += (Py_ssize_t)size;
- _PyObject_ASSERT(obj, dictoffset > 0);
- _PyObject_ASSERT(obj, dictoffset % SIZEOF_VOID_P == 0);
}
- dictptr = (PyObject **) ((char *)obj + dictoffset);
- dict = *dictptr;
+ else {
+ dictptr = _PyObject_DictPointer(obj);
+ assert(dictptr != NULL && *dictptr == NULL);
+ *dictptr = dict = _PyObject_MakeDictFromInstanceAttributes(obj, *values_ptr);
+ if (dict == NULL) {
+ res = NULL;
+ goto done;
+ }
+ *values_ptr = NULL;
+ }
+ }
+ else {
+ /* Inline _PyObject_DictPointer */
+ dictoffset = tp->tp_dictoffset;
+ if (dictoffset != 0) {
+ if (dictoffset < 0) {
+ Py_ssize_t tsize = Py_SIZE(obj);
+ if (tsize < 0) {
+ tsize = -tsize;
+ }
+ size_t size = _PyObject_VAR_SIZE(tp, tsize);
+ _PyObject_ASSERT(obj, size <= PY_SSIZE_T_MAX);
+
+ dictoffset += (Py_ssize_t)size;
+ _PyObject_ASSERT(obj, dictoffset > 0);
+ _PyObject_ASSERT(obj, dictoffset % SIZEOF_VOID_P == 0);
+ }
+ dictptr = (PyObject **) ((char *)obj + dictoffset);
+ dict = *dictptr;
+ }
}
}
if (dict != NULL) {
@@ -1328,7 +1399,6 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name,
PyTypeObject *tp = Py_TYPE(obj);
PyObject *descr;
descrsetfunc f;
- PyObject **dictptr;
int res = -1;
if (!PyUnicode_Check(name)){
@@ -1354,30 +1424,30 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name,
}
}
- /* XXX [Steve Dower] These are really noisy - worth it? */
- /*if (PyType_Check(obj) || PyModule_Check(obj)) {
- if (value && PySys_Audit("object.__setattr__", "OOO", obj, name, value) < 0)
- return -1;
- if (!value && PySys_Audit("object.__delattr__", "OO", obj, name) < 0)
- return -1;
- }*/
-
if (dict == NULL) {
- dictptr = _PyObject_GetDictPtr(obj);
- if (dictptr == NULL) {
- if (descr == NULL) {
- PyErr_Format(PyExc_AttributeError,
- "'%.100s' object has no attribute '%U'",
- tp->tp_name, name);
+ PyDictValues **values_ptr = _PyObject_ValuesPointer(obj);
+ if (values_ptr && *values_ptr) {
+ res = _PyObject_StoreInstanceAttribute(obj, *values_ptr, name, value);
+ }
+ else {
+ PyObject **dictptr = _PyObject_DictPointer(obj);
+ if (dictptr == NULL) {
+ if (descr == NULL) {
+ PyErr_Format(PyExc_AttributeError,
+ "'%.100s' object has no attribute '%U'",
+ tp->tp_name, name);
+ }
+ else {
+ PyErr_Format(PyExc_AttributeError,
+ "'%.50s' object attribute '%U' is read-only",
+ tp->tp_name, name);
+ }
+ goto done;
}
else {
- PyErr_Format(PyExc_AttributeError,
- "'%.50s' object attribute '%U' is read-only",
- tp->tp_name, name);
+ res = _PyObjectDict_SetItem(tp, dictptr, name, value);
}
- goto done;
}
- res = _PyObjectDict_SetItem(tp, dictptr, name, value);
}
else {
Py_INCREF(dict);
@@ -1407,8 +1477,15 @@ PyObject_GenericSetDict(PyObject *obj, PyObject *value, void *context)
{
PyObject **dictptr = _PyObject_GetDictPtr(obj);
if (dictptr == NULL) {
- PyErr_SetString(PyExc_AttributeError,
- "This object has no __dict__");
+ PyDictValues** values_ptr = _PyObject_ValuesPointer(obj);
+ if (values_ptr != NULL && *values_ptr != NULL) {
+ /* Was unable to convert to dict */
+ PyErr_NoMemory();
+ }
+ else {
+ PyErr_SetString(PyExc_AttributeError,
+ "This object has no __dict__");
+ }
return -1;
}
if (value == NULL) {
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index 544e8a4..aa07333 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -1232,8 +1232,16 @@ subtype_traverse(PyObject *self, visitproc visit, void *arg)
assert(base);
}
+ if (type->tp_inline_values_offset) {
+ assert(type->tp_dictoffset);
+ int err = _PyObject_VisitInstanceAttributes(self, visit, arg);
+ if (err) {
+ return err;
+ }
+ }
+
if (type->tp_dictoffset != base->tp_dictoffset) {
- PyObject **dictptr = _PyObject_GetDictPtr(self);
+ PyObject **dictptr = _PyObject_DictPointer(self);
if (dictptr && *dictptr)
Py_VISIT(*dictptr);
}
@@ -1293,8 +1301,11 @@ subtype_clear(PyObject *self)
/* Clear the instance dict (if any), to break cycles involving only
__dict__ slots (as in the case 'self.__dict__ is self'). */
+ if (type->tp_inline_values_offset) {
+ _PyObject_ClearInstanceAttributes(self);
+ }
if (type->tp_dictoffset != base->tp_dictoffset) {
- PyObject **dictptr = _PyObject_GetDictPtr(self);
+ PyObject **dictptr = _PyObject_DictPointer(self);
if (dictptr && *dictptr)
Py_CLEAR(*dictptr);
}
@@ -1433,9 +1444,12 @@ subtype_dealloc(PyObject *self)
assert(base);
}
- /* If we added a dict, DECREF it */
+ /* If we added a dict, DECREF it, or free inline values. */
+ if (type->tp_inline_values_offset) {
+ _PyObject_FreeInstanceAttributes(self);
+ }
if (type->tp_dictoffset && !base->tp_dictoffset) {
- PyObject **dictptr = _PyObject_GetDictPtr(self);
+ PyObject **dictptr = _PyObject_DictPointer(self);
if (dictptr != NULL) {
PyObject *dict = *dictptr;
if (dict != NULL) {
@@ -2159,7 +2173,6 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro)
return 1;
}
-
/* Calculate the best base amongst multiple base classes.
This is the first one that's on the path to the "solid base". */
@@ -2230,6 +2243,10 @@ extra_ivars(PyTypeObject *type, PyTypeObject *base)
return t_size != b_size ||
type->tp_itemsize != base->tp_itemsize;
}
+ if (type->tp_inline_values_offset && base->tp_inline_values_offset == 0 &&
+ type->tp_inline_values_offset + sizeof(PyDictValues *) == t_size &&
+ type->tp_flags & Py_TPFLAGS_HEAPTYPE)
+ t_size -= sizeof(PyDictValues *);
if (type->tp_weaklistoffset && base->tp_weaklistoffset == 0 &&
type->tp_weaklistoffset + sizeof(PyObject *) == t_size &&
type->tp_flags & Py_TPFLAGS_HEAPTYPE)
@@ -2238,7 +2255,6 @@ extra_ivars(PyTypeObject *type, PyTypeObject *base)
type->tp_dictoffset + sizeof(PyObject *) == t_size &&
type->tp_flags & Py_TPFLAGS_HEAPTYPE)
t_size -= sizeof(PyObject *);
-
return t_size != b_size;
}
@@ -2258,6 +2274,7 @@ solid_base(PyTypeObject *type)
}
static void object_dealloc(PyObject *);
+static PyObject *object_new(PyTypeObject *, PyObject *, PyObject *);
static int object_init(PyObject *, PyObject *, PyObject *);
static int update_slot(PyTypeObject *, PyObject *);
static void fixup_slot_dispatchers(PyTypeObject *);
@@ -2979,6 +2996,13 @@ type_new_descriptors(const type_new_ctx *ctx, PyTypeObject *type)
type->tp_weaklistoffset = slotoffset;
slotoffset += sizeof(PyObject *);
}
+ if (type->tp_dictoffset > 0) {
+ type->tp_inline_values_offset = slotoffset;
+ slotoffset += sizeof(PyDictValues *);
+ }
+ else {
+ type->tp_inline_values_offset = 0;
+ }
type->tp_basicsize = slotoffset;
type->tp_itemsize = ctx->base->tp_itemsize;
@@ -3181,7 +3205,8 @@ type_new_impl(type_new_ctx *ctx)
// Put the proper slots in place
fixup_slot_dispatchers(type);
- if (type->tp_dictoffset) {
+ if (type->tp_inline_values_offset) {
+ assert(type->tp_dictoffset > 0);
PyHeapTypeObject *et = (PyHeapTypeObject*)type;
et->ht_cached_keys = _PyDict_NewKeysForClass();
}
@@ -3195,6 +3220,7 @@ type_new_impl(type_new_ctx *ctx)
}
assert(_PyType_CheckConsistency(type));
+
return (PyObject *)type;
error:
@@ -3550,7 +3576,8 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases)
if (PyType_Ready(type) < 0)
goto fail;
- if (type->tp_dictoffset) {
+ if (type->tp_inline_values_offset) {
+ assert(type->tp_dictoffset > 0);
res->ht_cached_keys = _PyDict_NewKeysForClass();
}
@@ -4257,7 +4284,6 @@ type_traverse(PyTypeObject *type, visitproc visit, void *arg)
static int
type_clear(PyTypeObject *type)
{
- PyDictKeysObject *cached_keys;
/* Because of type_is_gc(), the collector only calls this
for heaptypes. */
_PyObject_ASSERT((PyObject *)type, type->tp_flags & Py_TPFLAGS_HEAPTYPE);
@@ -4292,11 +4318,6 @@ type_clear(PyTypeObject *type)
*/
PyType_Modified(type);
- cached_keys = ((PyHeapTypeObject *)type)->ht_cached_keys;
- if (cached_keys != NULL) {
- ((PyHeapTypeObject *)type)->ht_cached_keys = NULL;
- _PyDictKeys_DecRef(cached_keys);
- }
if (type->tp_dict) {
PyDict_Clear(type->tp_dict);
}
@@ -4618,6 +4639,7 @@ compatible_with_tp_base(PyTypeObject *child)
child->tp_itemsize == parent->tp_itemsize &&
child->tp_dictoffset == parent->tp_dictoffset &&
child->tp_weaklistoffset == parent->tp_weaklistoffset &&
+ child->tp_inline_values_offset == parent->tp_inline_values_offset &&
((child->tp_flags & Py_TPFLAGS_HAVE_GC) ==
(parent->tp_flags & Py_TPFLAGS_HAVE_GC)) &&
(child->tp_dealloc == subtype_dealloc ||
@@ -4637,6 +4659,8 @@ same_slots_added(PyTypeObject *a, PyTypeObject *b)
size += sizeof(PyObject *);
if (a->tp_weaklistoffset == size && b->tp_weaklistoffset == size)
size += sizeof(PyObject *);
+ if (a->tp_inline_values_offset == size && b->tp_inline_values_offset == size)
+ size += sizeof(PyObject *);
/* Check slots compliance */
if (!(a->tp_flags & Py_TPFLAGS_HEAPTYPE) ||
@@ -4781,6 +4805,17 @@ object_set_class(PyObject *self, PyObject *value, void *closure)
}
if (compatible_for_assignment(oldto, newto, "__class__")) {
+ /* Changing the class will change the implicit dict keys,
+ * so we must materialize the dictionary first. */
+ assert(oldto->tp_inline_values_offset == newto->tp_inline_values_offset);
+ _PyObject_GetDictPtr(self);
+ PyDictValues** values_ptr = _PyObject_ValuesPointer(self);
+ if (values_ptr != NULL && *values_ptr != NULL) {
+ /* Was unable to convert to dict */
+ PyErr_NoMemory();
+ return -1;
+ }
+ assert(_PyObject_ValuesPointer(self) == NULL || *_PyObject_ValuesPointer(self) == NULL);
if (newto->tp_flags & Py_TPFLAGS_HEAPTYPE) {
Py_INCREF(newto);
}
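The materialize-before-assignment rule added above is visible to users only as the absence of a behaviour change; a hedged sketch of the invariant it preserves (instance attributes survive a compatible __class__ assignment):

    class A:
        pass

    class B:
        pass

    a = A()
    a.x = 1
    # Changing the class would change which shared keys the inline values
    # refer to, so the values are converted into a real __dict__ first.
    a.__class__ = B
    assert isinstance(a, B) and a.x == 1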
@@ -4906,23 +4941,16 @@ _PyObject_GetState(PyObject *obj, int required)
Py_TYPE(obj)->tp_name);
return NULL;
}
-
- {
- PyObject **dict;
- dict = _PyObject_GetDictPtr(obj);
- /* It is possible that the object's dict is not initialized
- yet. In this case, we will return None for the state.
- We also return None if the dict is empty to make the behavior
- consistent regardless whether the dict was initialized or not.
- This make unit testing easier. */
- if (dict != NULL && *dict != NULL && PyDict_GET_SIZE(*dict)) {
- state = *dict;
- }
- else {
- state = Py_None;
- }
+ if (_PyObject_IsInstanceDictEmpty(obj)) {
+ state = Py_None;
Py_INCREF(state);
}
+ else {
+ state = PyObject_GenericGetDict(obj, NULL);
+ if (state == NULL) {
+ return NULL;
+ }
+ }
slotnames = _PyType_GetSlotNames(Py_TYPE(obj));
if (slotnames == NULL) {
@@ -4933,12 +4961,18 @@ _PyObject_GetState(PyObject *obj, int required)
assert(slotnames == Py_None || PyList_Check(slotnames));
if (required) {
Py_ssize_t basicsize = PyBaseObject_Type.tp_basicsize;
- if (Py_TYPE(obj)->tp_dictoffset)
+ if (Py_TYPE(obj)->tp_dictoffset) {
basicsize += sizeof(PyObject *);
- if (Py_TYPE(obj)->tp_weaklistoffset)
+ }
+ if (Py_TYPE(obj)->tp_weaklistoffset) {
basicsize += sizeof(PyObject *);
- if (slotnames != Py_None)
+ }
+ if (Py_TYPE(obj)->tp_inline_values_offset) {
+ basicsize += sizeof(PyDictValues *);
+ }
+ if (slotnames != Py_None) {
basicsize += sizeof(PyObject *) * PyList_GET_SIZE(slotnames);
+ }
if (Py_TYPE(obj)->tp_basicsize > basicsize) {
Py_DECREF(slotnames);
Py_DECREF(state);
@@ -5708,6 +5742,7 @@ inherit_special(PyTypeObject *type, PyTypeObject *base)
COPYVAL(tp_itemsize);
COPYVAL(tp_weaklistoffset);
COPYVAL(tp_dictoffset);
+ COPYVAL(tp_inline_values_offset);
#undef COPYVAL
/* Setup fast subclass flags */
diff --git a/Python/ceval.c b/Python/ceval.c
index f4cacd8..5cf2ab3 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -3618,7 +3618,7 @@ check_eval_breaker:
}
}
- TARGET(LOAD_ATTR_SPLIT_KEYS) {
+ TARGET(LOAD_ATTR_INSTANCE_VALUE) {
assert(cframe.use_tracing == 0);
PyObject *owner = TOP();
PyObject *res;
@@ -3629,11 +3629,10 @@ check_eval_breaker:
assert(cache1->tp_version != 0);
DEOPT_IF(tp->tp_version_tag != cache1->tp_version, LOAD_ATTR);
assert(tp->tp_dictoffset > 0);
- PyDictObject *dict = *(PyDictObject **)(((char *)owner) + tp->tp_dictoffset);
- DEOPT_IF(dict == NULL, LOAD_ATTR);
- assert(PyDict_CheckExact((PyObject *)dict));
- DEOPT_IF(dict->ma_keys->dk_version != cache1->dk_version_or_hint, LOAD_ATTR);
- res = dict->ma_values->values[cache0->index];
+ assert(tp->tp_inline_values_offset > 0);
+ PyDictValues *values = *(PyDictValues **)(((char *)owner) + tp->tp_inline_values_offset);
+ DEOPT_IF(values == NULL, LOAD_ATTR);
+ res = values->values[cache0->index];
DEOPT_IF(res == NULL, LOAD_ATTR);
STAT_INC(LOAD_ATTR, hit);
record_cache_hit(cache0);
@@ -3725,7 +3724,7 @@ check_eval_breaker:
}
}
- TARGET(STORE_ATTR_SPLIT_KEYS) {
+ TARGET(STORE_ATTR_INSTANCE_VALUE) {
assert(cframe.use_tracing == 0);
PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
@@ -3735,31 +3734,23 @@ check_eval_breaker:
assert(cache1->tp_version != 0);
DEOPT_IF(tp->tp_version_tag != cache1->tp_version, STORE_ATTR);
assert(tp->tp_dictoffset > 0);
- PyDictObject *dict = *(PyDictObject **)(((char *)owner) + tp->tp_dictoffset);
- DEOPT_IF(dict == NULL, STORE_ATTR);
- assert(PyDict_CheckExact((PyObject *)dict));
- DEOPT_IF(dict->ma_keys->dk_version != cache1->dk_version_or_hint, STORE_ATTR);
+ assert(tp->tp_inline_values_offset > 0);
+ PyDictValues *values = *(PyDictValues **)(((char *)owner) + tp->tp_inline_values_offset);
+ DEOPT_IF(values == NULL, STORE_ATTR);
STAT_INC(STORE_ATTR, hit);
record_cache_hit(cache0);
int index = cache0->index;
STACK_SHRINK(1);
PyObject *value = POP();
- PyObject *old_value = dict->ma_values->values[index];
- dict->ma_values->values[index] = value;
+ PyObject *old_value = values->values[index];
+ values->values[index] = value;
if (old_value == NULL) {
assert(index < 16);
- dict->ma_values->mv_order = (dict->ma_values->mv_order << 4) | index;
- dict->ma_used++;
+ values->mv_order = (values->mv_order << 4) | index;
}
else {
Py_DECREF(old_value);
}
- /* Ensure dict is GC tracked if it needs to be */
- if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) {
- _PyObject_GC_TRACK(dict);
- }
- /* PEP 509 */
- dict->ma_version_tag = DICT_NEXT_VERSION();
Py_DECREF(owner);
DISPATCH();
}
@@ -4474,21 +4465,31 @@ check_eval_breaker:
_PyObjectCache *cache2 = &caches[-2].obj;
DEOPT_IF(self_cls->tp_version_tag != cache1->tp_version, LOAD_METHOD);
- assert(cache1->dk_version_or_hint != 0);
- assert(cache1->tp_version != 0);
- assert(self_cls->tp_dictoffset >= 0);
- assert(Py_TYPE(self_cls)->tp_dictoffset > 0);
-
- // inline version of _PyObject_GetDictPtr for offset >= 0
- PyObject *dict = self_cls->tp_dictoffset != 0 ?
- *(PyObject **) ((char *)self + self_cls->tp_dictoffset) : NULL;
-
- // Ensure self.__dict__ didn't modify keys.
- // Don't care if self has no dict, it could be builtin or __slots__.
- DEOPT_IF(dict != NULL &&
- ((PyDictObject *)dict)->ma_keys->dk_version !=
- cache1->dk_version_or_hint, LOAD_METHOD);
+ assert(self_cls->tp_dictoffset > 0);
+ assert(self_cls->tp_inline_values_offset > 0);
+ PyDictObject *dict = *(PyDictObject **)(((char *)self) + self_cls->tp_dictoffset);
+ DEOPT_IF(dict != NULL, LOAD_METHOD);
+ DEOPT_IF(((PyHeapTypeObject *)self_cls)->ht_cached_keys->dk_version != cache1->dk_version_or_hint, LOAD_METHOD);
+ STAT_INC(LOAD_METHOD, hit);
+ record_cache_hit(cache0);
+ PyObject *res = cache2->obj;
+ assert(res != NULL);
+ assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR));
+ Py_INCREF(res);
+ SET_TOP(res);
+ PUSH(self);
+ DISPATCH();
+ }
+ TARGET(LOAD_METHOD_NO_DICT) {
+ PyObject *self = TOP();
+ PyTypeObject *self_cls = Py_TYPE(self);
+ SpecializedCacheEntry *caches = GET_CACHE();
+ _PyAdaptiveEntry *cache0 = &caches[0].adaptive;
+ _PyAttrCache *cache1 = &caches[-1].attr;
+ _PyObjectCache *cache2 = &caches[-2].obj;
+ DEOPT_IF(self_cls->tp_version_tag != cache1->tp_version, LOAD_METHOD);
+ assert(self_cls->tp_dictoffset == 0);
STAT_INC(LOAD_METHOD, hit);
record_cache_hit(cache0);
PyObject *res = cache2->obj;
@@ -4530,7 +4531,6 @@ check_eval_breaker:
record_cache_hit(cache0);
PyObject *res = cache2->obj;
assert(res != NULL);
- assert(_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR));
Py_INCREF(res);
SET_TOP(NULL);
Py_DECREF(cls);
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h
index 30df683..773f925 100644
--- a/Python/opcode_targets.h
+++ b/Python/opcode_targets.h
@@ -41,7 +41,7 @@ static void *opcode_targets[256] = {
&&TARGET_BINARY_SUBSCR_DICT,
&&TARGET_JUMP_ABSOLUTE_QUICK,
&&TARGET_LOAD_ATTR_ADAPTIVE,
- &&TARGET_LOAD_ATTR_SPLIT_KEYS,
+ &&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_MODULE,
@@ -87,7 +87,7 @@ static void *opcode_targets[256] = {
&&TARGET_SETUP_ANNOTATIONS,
&&TARGET_YIELD_VALUE,
&&TARGET_LOAD_METHOD_MODULE,
- &&TARGET_STORE_ATTR_ADAPTIVE,
+ &&TARGET_LOAD_METHOD_NO_DICT,
&&TARGET_POP_EXCEPT,
&&TARGET_STORE_NAME,
&&TARGET_DELETE_NAME,
@@ -119,36 +119,36 @@ static void *opcode_targets[256] = {
&&TARGET_IS_OP,
&&TARGET_CONTAINS_OP,
&&TARGET_RERAISE,
- &&TARGET_STORE_ATTR_SPLIT_KEYS,
+ &&TARGET_STORE_ATTR_ADAPTIVE,
&&TARGET_JUMP_IF_NOT_EXC_MATCH,
+ &&TARGET_STORE_ATTR_INSTANCE_VALUE,
&&TARGET_STORE_ATTR_SLOT,
- &&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_LOAD_FAST,
&&TARGET_STORE_FAST,
&&TARGET_DELETE_FAST,
+ &&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_LOAD_FAST__LOAD_FAST,
- &&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_GEN_START,
&&TARGET_RAISE_VARARGS,
&&TARGET_CALL_FUNCTION,
&&TARGET_MAKE_FUNCTION,
&&TARGET_BUILD_SLICE,
- &&TARGET_LOAD_FAST__LOAD_CONST,
+ &&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_MAKE_CELL,
&&TARGET_LOAD_CLOSURE,
&&TARGET_LOAD_DEREF,
&&TARGET_STORE_DEREF,
&&TARGET_DELETE_DEREF,
- &&TARGET_LOAD_CONST__LOAD_FAST,
+ &&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_CALL_FUNCTION_KW,
&&TARGET_CALL_FUNCTION_EX,
- &&TARGET_STORE_FAST__STORE_FAST,
+ &&TARGET_LOAD_CONST__LOAD_FAST,
&&TARGET_EXTENDED_ARG,
&&TARGET_LIST_APPEND,
&&TARGET_SET_ADD,
&&TARGET_MAP_ADD,
&&TARGET_LOAD_CLASSDEREF,
- &&_unknown_opcode,
+ &&TARGET_STORE_FAST__STORE_FAST,
&&_unknown_opcode,
&&_unknown_opcode,
&&TARGET_MATCH_CLASS,
diff --git a/Python/specialize.c b/Python/specialize.c
index 4e025384..6efee76 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -4,6 +4,7 @@
#include "pycore_dict.h"
#include "pycore_long.h"
#include "pycore_moduleobject.h"
+#include "pycore_object.h"
#include "opcode.h"
#include "structmember.h" // struct PyMemberDef, T_OFFSET_EX
@@ -462,6 +463,7 @@ specialize_module_load_attr(
PyObject *value = NULL;
PyObject *getattr;
_Py_IDENTIFIER(__getattr__);
+ assert(owner->ob_type->tp_inline_values_offset == 0);
PyDictObject *dict = (PyDictObject *)m->md_dict;
if (dict == NULL) {
SPECIALIZATION_FAIL(opcode, SPEC_FAIL_NO_DICT);
@@ -584,7 +586,7 @@ specialize_dict_access(
PyObject *owner, _Py_CODEUNIT *instr, PyTypeObject *type,
DesciptorClassification kind, PyObject *name,
_PyAdaptiveEntry *cache0, _PyAttrCache *cache1,
- int base_op, int split_op, int hint_op)
+ int base_op, int values_op, int hint_op)
{
assert(kind == NON_OVERRIDING || kind == NON_DESCRIPTOR || kind == ABSENT ||
kind == BUILTIN_CLASSMETHOD || kind == PYTHON_CLASSMETHOD);
@@ -595,17 +597,11 @@ specialize_dict_access(
}
if (type->tp_dictoffset > 0) {
PyObject **dictptr = (PyObject **) ((char *)owner + type->tp_dictoffset);
- if (*dictptr == NULL || !PyDict_CheckExact(*dictptr)) {
- SPECIALIZATION_FAIL(base_op, SPEC_FAIL_NO_DICT);
- return 0;
- }
- // We found an instance with a __dict__.
PyDictObject *dict = (PyDictObject *)*dictptr;
- PyDictKeysObject *keys = dict->ma_keys;
- if ((type->tp_flags & Py_TPFLAGS_HEAPTYPE)
- && keys == ((PyHeapTypeObject*)type)->ht_cached_keys
- ) {
- // Keys are shared
+ if (type->tp_inline_values_offset && dict == NULL) {
+ // Virtual dictionary
+ PyDictKeysObject *keys = ((PyHeapTypeObject *)type)->ht_cached_keys;
+ assert(type->tp_inline_values_offset > 0);
assert(PyUnicode_CheckExact(name));
Py_ssize_t index = _PyDictKeys_StringLookup(keys, name);
assert (index != DKIX_ERROR);
@@ -613,18 +609,17 @@ specialize_dict_access(
SPECIALIZATION_FAIL(base_op, SPEC_FAIL_OUT_OF_RANGE);
return 0;
}
- uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(keys);
- if (keys_version == 0) {
- SPECIALIZATION_FAIL(base_op, SPEC_FAIL_OUT_OF_VERSIONS);
- return 0;
- }
- cache1->dk_version_or_hint = keys_version;
cache1->tp_version = type->tp_version_tag;
cache0->index = (uint16_t)index;
- *instr = _Py_MAKECODEUNIT(split_op, _Py_OPARG(*instr));
+ *instr = _Py_MAKECODEUNIT(values_op, _Py_OPARG(*instr));
return 0;
}
else {
+ if (dict == NULL || !PyDict_CheckExact(dict)) {
+ SPECIALIZATION_FAIL(base_op, SPEC_FAIL_NO_DICT);
+ return 0;
+ }
+ // We found an instance with a __dict__.
PyObject *value = NULL;
Py_ssize_t hint =
_PyDict_GetItemHint(dict, name, -1, &value);
@@ -736,7 +731,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, Sp
}
int err = specialize_dict_access(
owner, instr, type, kind, name, cache0, cache1,
- LOAD_ATTR, LOAD_ATTR_SPLIT_KEYS, LOAD_ATTR_WITH_HINT
+ LOAD_ATTR, LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_WITH_HINT
);
if (err < 0) {
return -1;
@@ -818,7 +813,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, S
int err = specialize_dict_access(
owner, instr, type, kind, name, cache0, cache1,
- STORE_ATTR, STORE_ATTR_SPLIT_KEYS, STORE_ATTR_WITH_HINT
+ STORE_ATTR, STORE_ATTR_INSTANCE_VALUE, STORE_ATTR_WITH_HINT
);
if (err < 0) {
return -1;
@@ -875,6 +870,27 @@ load_method_fail_kind(DesciptorClassification kind)
}
#endif
+static int
+specialize_class_load_method(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name,
+ _PyAttrCache *cache1, _PyObjectCache *cache2)
+{
+
+ PyObject *descr = NULL;
+ DesciptorClassification kind = 0;
+ kind = analyze_descriptor((PyTypeObject *)owner, name, &descr, 0);
+ switch (kind) {
+ case METHOD:
+ case NON_DESCRIPTOR:
+ cache1->tp_version = ((PyTypeObject *)owner)->tp_version_tag;
+ cache2->obj = descr;
+ *instr = _Py_MAKECODEUNIT(LOAD_METHOD_CLASS, _Py_OPARG(*instr));
+ return 0;
+ default:
+ SPECIALIZATION_FAIL(LOAD_METHOD, load_method_fail_kind(kind));
+ return -1;
+ }
+}
+
// Please collect stats carefully before and after modifying. A subtle change
// can cause a significant drop in cache hits. A possible test is
// python.exe -m test_typing test_re test_dis test_zlib.
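
[Editor's note] The new specialize_class_load_method() helper above handles the case where the LOAD_METHOD owner is the class object itself: it caches the descriptor and the type's version tag and rewrites the instruction to LOAD_METHOD_CLASS. A hedged sketch of the call shape it targets:

class Registry:
    _items = []

    @classmethod
    def register(cls, item):
        cls._items.append(item)

def add(item):
    # The owner of this LOAD_METHOD is the class object `Registry`, so the
    # specializer can take the new LOAD_METHOD_CLASS path (descriptor kind METHOD).
    Registry.register(item)

for i in range(1000):
    add(i)
assert len(Registry._items) == 1000
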
@@ -886,7 +902,6 @@ _Py_Specialize_LoadMethod(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name,
_PyObjectCache *cache2 = &cache[-2].obj;
PyTypeObject *owner_cls = Py_TYPE(owner);
- PyDictObject *owner_dict = NULL;
if (PyModule_CheckExact(owner)) {
int err = specialize_module_load_attr(owner, instr, name, cache0, cache1,
LOAD_METHOD, LOAD_METHOD_MODULE);
@@ -900,9 +915,12 @@ _Py_Specialize_LoadMethod(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name,
return -1;
}
}
- if (Py_TYPE(owner_cls)->tp_dictoffset < 0) {
- SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_OUT_OF_RANGE);
- goto fail;
+ if (PyType_Check(owner)) {
+ int err = specialize_class_load_method(owner, instr, name, cache1, cache2);
+ if (err) {
+ goto fail;
+ }
+ goto success;
}
// Technically this is fine for bound method calls, but it's uncommon and
// slightly slower at runtime to get dict.
@@ -910,66 +928,45 @@ _Py_Specialize_LoadMethod(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name,
SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_OUT_OF_RANGE);
goto fail;
}
- PyObject **owner_dictptr = _PyObject_GetDictPtr(owner);
- int owner_has_dict = (owner_dictptr != NULL && *owner_dictptr != NULL);
- owner_dict = owner_has_dict ? (PyDictObject *)*owner_dictptr : NULL;
- // Make sure dict doesn't get GC-ed halfway.
- Py_XINCREF(owner_dict);
- // Check for classmethods.
- int owner_is_class = PyType_Check(owner);
- owner_cls = owner_is_class ? (PyTypeObject *)owner : owner_cls;
-
- if ((owner_cls->tp_flags & Py_TPFLAGS_VALID_VERSION_TAG) == 0 ||
- owner_cls->tp_version_tag == 0) {
- SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_OUT_OF_VERSIONS);
- goto fail;
- }
PyObject *descr = NULL;
DesciptorClassification kind = 0;
kind = analyze_descriptor(owner_cls, name, &descr, 0);
- // Store the version right away, in case it's modified halfway through.
- cache1->tp_version = owner_cls->tp_version_tag;
-
assert(descr != NULL || kind == ABSENT || kind == GETSET_OVERRIDDEN);
if (kind != METHOD) {
SPECIALIZATION_FAIL(LOAD_METHOD, load_method_fail_kind(kind));
goto fail;
}
- // If o.__dict__ changes, the method might be found in o.__dict__
- // instead of old type lookup. So record o.__dict__'s keys.
- uint32_t keys_version = UINT32_MAX;
- if (owner_has_dict) {
- // _PyDictKeys_GetVersionForCurrentState isn't accurate for
- // custom dict subclasses at the moment.
- if (!PyDict_CheckExact(owner_dict)) {
- SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_DICT_SUBCLASS);
+ if (owner_cls->tp_inline_values_offset) {
+ PyObject **owner_dictptr = _PyObject_DictPointer(owner);
+ assert(owner_dictptr);
+ if (*owner_dictptr) {
+ SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_IS_ATTR);
goto fail;
}
- assert(PyUnicode_CheckExact(name));
- Py_hash_t hash = PyObject_Hash(name);
- if (hash == -1) {
- return -1;
- }
- PyObject *value = NULL;
- if (!owner_is_class) {
- // Instance methods shouldn't be in o.__dict__. That makes
- // it an attribute.
- Py_ssize_t ix = _Py_dict_lookup(owner_dict, name, hash, &value);
- assert(ix != DKIX_ERROR);
- if (ix != DKIX_EMPTY) {
- SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_IS_ATTR);
- goto fail;
- }
+ PyDictKeysObject *keys = ((PyHeapTypeObject *)owner_cls)->ht_cached_keys;
+ Py_ssize_t index = _PyDictKeys_StringLookup(keys, name);
+ if (index != DKIX_EMPTY) {
+ SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_IS_ATTR);
+ goto fail;
}
- keys_version = _PyDictKeys_GetVersionForCurrentState(owner_dict->ma_keys);
+ uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(keys);
if (keys_version == 0) {
- SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS);
+ SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_OUT_OF_VERSIONS);
goto fail;
}
- // Fall through.
- } // Else owner is maybe a builtin with no dict, or __slots__. Doesn't matter.
-
+ cache1->dk_version_or_hint = keys_version;
+ *instr = _Py_MAKECODEUNIT(LOAD_METHOD_CACHED, _Py_OPARG(*instr));
+ }
+ else {
+ if (owner_cls->tp_dictoffset == 0) {
+ *instr = _Py_MAKECODEUNIT(LOAD_METHOD_NO_DICT, _Py_OPARG(*instr));
+ }
+ else {
+ SPECIALIZATION_FAIL(LOAD_METHOD, SPEC_FAIL_IS_ATTR);
+ goto fail;
+ }
+ }
/* `descr` is borrowed. This is safe for methods (even inherited ones from
* super classes!) as long as tp_version_tag is validated for two main reasons:
*
@@ -984,19 +981,15 @@ _Py_Specialize_LoadMethod(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name,
* PyType_Modified usages in typeobject.c). The MCACHE has been
* working since Python 2.6 and it's battle-tested.
*/
+ cache1->tp_version = owner_cls->tp_version_tag;
cache2->obj = descr;
- cache1->dk_version_or_hint = keys_version;
- *instr = _Py_MAKECODEUNIT(owner_is_class ? LOAD_METHOD_CLASS :
- LOAD_METHOD_CACHED, _Py_OPARG(*instr));
// Fall through.
success:
- Py_XDECREF(owner_dict);
STAT_INC(LOAD_METHOD, specialization_success);
assert(!PyErr_Occurred());
cache0->counter = saturating_start();
return 0;
fail:
- Py_XDECREF(owner_dict);
STAT_INC(LOAD_METHOD, specialization_failure);
assert(!PyErr_Occurred());
cache_backoff(cache0);
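
[Editor's note] The reworked _Py_Specialize_LoadMethod above refuses to specialize (SPEC_FAIL_IS_ATTR) as soon as the instance has a materialized __dict__ or the shared keys already contain the name, because an instance attribute would shadow the class method and invalidate the cached class lookup. The shadowing rule it protects is ordinary Python behavior:

class Greeter:
    def hello(self):
        return "from the class"

g = Greeter()
assert g.hello() == "from the class"

# A same-named instance attribute wins over the (non-data descriptor) method,
# so a cached "look it up on the type" shortcut would now give the wrong answer.
g.hello = lambda: "from the instance"
assert g.hello() == "from the instance"
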
diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py
index a118d32..54f7232 100755
--- a/Tools/gdb/libpython.py
+++ b/Tools/gdb/libpython.py
@@ -445,10 +445,11 @@ def _write_instance_repr(out, visited, name, pyop_attrdict, address):
out.write(name)
# Write dictionary of instance attributes:
- if isinstance(pyop_attrdict, PyDictObjectPtr):
+ if isinstance(pyop_attrdict, (PyKeysValuesPair, PyDictObjectPtr)):
out.write('(')
first = True
- for pyop_arg, pyop_val in pyop_attrdict.iteritems():
+ items = pyop_attrdict.iteritems()
+ for pyop_arg, pyop_val in items:
if not first:
out.write(', ')
first = False
@@ -520,6 +521,25 @@ class HeapTypeObjectPtr(PyObjectPtr):
# Not found, or some kind of error:
return None
+ def get_keys_values(self):
+ typeobj = self.type()
+ values_offset = int_from_int(typeobj.field('tp_inline_values_offset'))
+ if values_offset == 0:
+ return None
+ charptr = self._gdbval.cast(_type_char_ptr()) + values_offset
+ PyDictValuesPtrPtr = gdb.lookup_type("PyDictValues").pointer().pointer()
+ valuesptr = charptr.cast(PyDictValuesPtrPtr)
+ values = valuesptr.dereference()
+ if long(values) == 0:
+ return None
+ values = values['values']
+ return PyKeysValuesPair(self.get_cached_keys(), values)
+
+ def get_cached_keys(self):
+ typeobj = self.type()
+ HeapTypePtr = gdb.lookup_type("PyHeapTypeObject").pointer()
+ return typeobj._gdbval.cast(HeapTypePtr)['ht_cached_keys']
+
def proxyval(self, visited):
'''
Support for classes.
@@ -533,7 +553,10 @@ class HeapTypeObjectPtr(PyObjectPtr):
visited.add(self.as_address())
pyop_attr_dict = self.get_attr_dict()
- if pyop_attr_dict:
+ keys_values = self.get_keys_values()
+ if keys_values:
+ attr_dict = keys_values.proxyval(visited)
+ elif pyop_attr_dict:
attr_dict = pyop_attr_dict.proxyval(visited)
else:
attr_dict = {}
@@ -549,9 +572,11 @@ class HeapTypeObjectPtr(PyObjectPtr):
return
visited.add(self.as_address())
- pyop_attrdict = self.get_attr_dict()
+ pyop_attrs = self.get_keys_values()
+ if not pyop_attrs:
+ pyop_attrs = self.get_attr_dict()
_write_instance_repr(out, visited,
- self.safe_tp_name(), pyop_attrdict, self.as_address())
+ self.safe_tp_name(), pyop_attrs, self.as_address())
class ProxyException(Exception):
def __init__(self, tp_name, args):
@@ -673,6 +698,32 @@ class PyCodeObjectPtr(PyObjectPtr):
assert False, "Unreachable"
+def items_from_keys_and_values(keys, values):
+ entries, nentries = PyDictObjectPtr._get_entries(keys)
+ for i in safe_range(nentries):
+ ep = entries[i]
+ pyop_value = PyObjectPtr.from_pyobject_ptr(values[i])
+ if not pyop_value.is_null():
+ pyop_key = PyObjectPtr.from_pyobject_ptr(ep['me_key'])
+ yield (pyop_key, pyop_value)
+
+class PyKeysValuesPair:
+
+ def __init__(self, keys, values):
+ self.keys = keys
+ self.values = values
+
+ def iteritems(self):
+ return items_from_keys_and_values(self.keys, self.values)
+
+ def proxyval(self, visited):
+ result = {}
+ for pyop_key, pyop_value in self.iteritems():
+ proxy_key = pyop_key.proxyval(visited)
+ proxy_value = pyop_value.proxyval(visited)
+ result[proxy_key] = proxy_value
+ return result
+
class PyDictObjectPtr(PyObjectPtr):
"""
Class wrapping a gdb.Value that's a PyDictObject* i.e. a dict instance
@@ -690,13 +741,14 @@ class PyDictObjectPtr(PyObjectPtr):
has_values = long(values)
if has_values:
values = values['values']
+ if has_values:
+ for item in items_from_keys_and_values(keys, values):
+ yield item
+ return
entries, nentries = self._get_entries(keys)
for i in safe_range(nentries):
ep = entries[i]
- if has_values:
- pyop_value = PyObjectPtr.from_pyobject_ptr(values[i])
- else:
- pyop_value = PyObjectPtr.from_pyobject_ptr(ep['me_value'])
+ pyop_value = PyObjectPtr.from_pyobject_ptr(ep['me_value'])
if not pyop_value.is_null():
pyop_key = PyObjectPtr.from_pyobject_ptr(ep['me_key'])
yield (pyop_key, pyop_value)
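
[Editor's note] Both the new items_from_keys_and_values() helper and the reworked PyDictObjectPtr.iteritems() above implement the same pairing: walk the shared key entries and match each entry against the values array, skipping unset slots. A plain-Python analogue of that pairing (sample data made up for illustration):

# me_key of each shared-keys entry, and the per-instance values array;
# None stands in for a NULL slot that was never assigned.
entry_keys = ["x", "y", "z"]
values = [1, None, 3]

def items_from_keys_and_values(entry_keys, values):
    for key, value in zip(entry_keys, values):
        if value is not None:          # skip NULL slots, like pyop_value.is_null()
            yield key, value

assert dict(items_from_keys_and_values(entry_keys, values)) == {"x": 1, "z": 3}
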
@@ -732,7 +784,8 @@ class PyDictObjectPtr(PyObjectPtr):
pyop_value.write_repr(out, visited)
out.write('}')
- def _get_entries(self, keys):
+ @staticmethod
+ def _get_entries(keys):
dk_nentries = int(keys['dk_nentries'])
dk_size = 1<<int(keys['dk_log2_size'])
try:
@@ -1958,7 +2011,7 @@ def move_in_stack(move_up):
print('Unable to find an older python frame')
else:
print('Unable to find a newer python frame')
-
+
class PyUp(gdb.Command):
'Select and print all python stack frame in the same eval loop starting from the one that called this one (if any)'