From 04eb5c8db1e24cabd0cb81392bb2632c03be1550 Mon Sep 17 00:00:00 2001 From: sobolevn Date: Sat, 27 Jul 2024 21:33:38 +0300 Subject: [PATCH 01/70] gh-122361: Use proper `PyUnicodeWriter_*` API in `constevaluator_call` (#122362) --- Objects/typevarobject.c | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c index fb1f260571b582..3c96850589d378 100644 --- a/Objects/typevarobject.c +++ b/Objects/typevarobject.c @@ -169,38 +169,40 @@ constevaluator_call(PyObject *self, PyObject *args, PyObject *kwargs) } PyObject *value = ((constevaluatorobject *)self)->value; if (format == 3) { // SOURCE - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); + PyUnicodeWriter *writer = PyUnicodeWriter_Create(5); // cannot be <5 + if (writer == NULL) { + return NULL; + } if (PyTuple_Check(value)) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, "(", 1) < 0) { - _PyUnicodeWriter_Dealloc(&writer); + if (PyUnicodeWriter_WriteChar(writer, '(') < 0) { + PyUnicodeWriter_Discard(writer); return NULL; } for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(value); i++) { PyObject *item = PyTuple_GET_ITEM(value, i); if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { - _PyUnicodeWriter_Dealloc(&writer); + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { + PyUnicodeWriter_Discard(writer); return NULL; } } - if (_Py_typing_type_repr(&writer, item) < 0) { - _PyUnicodeWriter_Dealloc(&writer); + if (_Py_typing_type_repr(writer, item) < 0) { + PyUnicodeWriter_Discard(writer); return NULL; } } - if (_PyUnicodeWriter_WriteASCIIString(&writer, ")", 1) < 0) { - _PyUnicodeWriter_Dealloc(&writer); + if (PyUnicodeWriter_WriteChar(writer, ')') < 0) { + PyUnicodeWriter_Discard(writer); return NULL; } } else { - if (_Py_typing_type_repr(&writer, value) < 0) { - _PyUnicodeWriter_Dealloc(&writer); + if (_Py_typing_type_repr(writer, value) < 0) { + PyUnicodeWriter_Discard(writer); return NULL; } } - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); } return Py_NewRef(value); } @@ -259,7 +261,7 @@ _Py_typing_type_repr(PyUnicodeWriter *writer, PyObject *p) } if (p == (PyObject *)&_PyNone_Type) { - return _PyUnicodeWriter_WriteASCIIString(writer, "None", 4); + return PyUnicodeWriter_WriteUTF8(writer, "None", 4); } if ((rc = PyObject_HasAttrWithError(p, &_Py_ID(__origin__))) > 0 && @@ -306,7 +308,7 @@ _Py_typing_type_repr(PyUnicodeWriter *writer, PyObject *p) if (r == NULL) { return -1; } - rc = _PyUnicodeWriter_WriteStr(writer, r); + rc = PyUnicodeWriter_WriteStr(writer, r); Py_DECREF(r); return rc; } From 3ff5ce4706630207bb2c2e2589a4501bf0d1bd78 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Sun, 28 Jul 2024 00:50:14 -0700 Subject: [PATCH 02/70] gh-119180: Add myself as CODEOWNER for annotationlib (#122366) Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 95e30ac3001c9c..9aa5004b0cdb7f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -214,6 +214,7 @@ Doc/c-api/stable.rst @encukou **/*idlelib* @terryjreedy /Doc/library/idle.rst @terryjreedy +**/*annotationlib* @JelleZijlstra **/*typing* @JelleZijlstra @AlexWaygood **/*ftplib @giampaolo From aa449cf063581ea515e2a6194d175f5e1db3d62e Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Sun, 28 Jul 2024 10:53:21 +0300 
Subject: [PATCH 03/70] gh-122085: Create dedicated page for deprecations (#122352) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- Doc/contents.rst | 1 + Doc/deprecations/index.rst | 10 ++++++++++ Doc/tools/templates/indexcontent.html | 2 ++ 3 files changed, 13 insertions(+) create mode 100644 Doc/deprecations/index.rst diff --git a/Doc/contents.rst b/Doc/contents.rst index 24ceacb0076b5e..b57f4b09a5dcb6 100644 --- a/Doc/contents.rst +++ b/Doc/contents.rst @@ -14,6 +14,7 @@ installing/index.rst howto/index.rst faq/index.rst + deprecations/index.rst glossary.rst about.rst diff --git a/Doc/deprecations/index.rst b/Doc/deprecations/index.rst new file mode 100644 index 00000000000000..cfb30dd09aef6f --- /dev/null +++ b/Doc/deprecations/index.rst @@ -0,0 +1,10 @@ +Deprecations +============ + +.. include:: pending-removal-in-3.14.rst + +.. include:: pending-removal-in-3.15.rst + +.. include:: pending-removal-in-3.16.rst + +.. include:: pending-removal-in-future.rst diff --git a/Doc/tools/templates/indexcontent.html b/Doc/tools/templates/indexcontent.html index 6f854e86ab8ef1..f2e9fbb0106452 100644 --- a/Doc/tools/templates/indexcontent.html +++ b/Doc/tools/templates/indexcontent.html @@ -33,6 +33,8 @@

{{ docstitle|e }}

{% trans %}C API reference{% endtrans %}

+ From b359f66c4c315ca14b2a075ee136145ba6610760 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Sun, 28 Jul 2024 09:59:07 +0200 Subject: [PATCH 04/70] gh-120593: Make _PyLong_CompactValue() parameter const again (#122367) Change _PyLong_IsCompact() and _PyLong_CompactValue() parameter type from 'PyObject*' to 'const PyObject*'. Avoid the Py_TYPE() macro which does not support const parameter. --- Include/cpython/longintrepr.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Include/cpython/longintrepr.h b/Include/cpython/longintrepr.h index d841c043f37fc4..c60ccc463653f9 100644 --- a/Include/cpython/longintrepr.h +++ b/Include/cpython/longintrepr.h @@ -119,18 +119,18 @@ PyAPI_FUNC(PyLongObject*) _PyLong_FromDigits( static inline int -_PyLong_IsCompact(PyLongObject* op) { - assert(PyType_HasFeature(Py_TYPE(op), Py_TPFLAGS_LONG_SUBCLASS)); +_PyLong_IsCompact(const PyLongObject* op) { + assert(PyType_HasFeature(op->ob_base.ob_type, Py_TPFLAGS_LONG_SUBCLASS)); return op->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS); } #define PyUnstable_Long_IsCompact _PyLong_IsCompact static inline Py_ssize_t -_PyLong_CompactValue(PyLongObject *op) +_PyLong_CompactValue(const PyLongObject *op) { Py_ssize_t sign; - assert(PyType_HasFeature(Py_TYPE(op), Py_TPFLAGS_LONG_SUBCLASS)); + assert(PyType_HasFeature(op->ob_base.ob_type, Py_TPFLAGS_LONG_SUBCLASS)); assert(PyUnstable_Long_IsCompact(op)); sign = 1 - (op->long_value.lv_tag & _PyLong_SIGN_MASK); return sign * (Py_ssize_t)op->long_value.ob_digit[0]; From bc93923a2dee00751e44da58b6967c63e3f5c392 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sun, 28 Jul 2024 11:33:17 +0300 Subject: [PATCH 05/70] gh-122311: Add more tests for pickle (GH-122376) --- Lib/test/pickletester.py | 770 ++++++++++++++++++++++++++++++++------- Lib/test/test_pickle.py | 18 + 2 files changed, 660 insertions(+), 128 deletions(-) diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 13663220fc77ea..174f4ff6d021b2 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -144,6 +144,14 @@ class E(C): def __getinitargs__(self): return () +import __main__ +__main__.C = C +C.__module__ = "__main__" +__main__.D = D +D.__module__ = "__main__" +__main__.E = E +E.__module__ = "__main__" + # Simple mutable object. 
class Object: pass @@ -157,14 +165,6 @@ def __reduce__(self): # Shouldn't support the recursion itself return K, (self.value,) -import __main__ -__main__.C = C -C.__module__ = "__main__" -__main__.D = D -D.__module__ = "__main__" -__main__.E = E -E.__module__ = "__main__" - class myint(int): def __init__(self, x): self.str = str(x) @@ -1179,6 +1179,124 @@ def test_compat_unpickle(self): self.assertIs(type(unpickled), collections.UserDict) self.assertEqual(unpickled, collections.UserDict({1: 2})) + def test_load_global(self): + self.assertIs(self.loads(b'cbuiltins\nstr\n.'), str) + self.assertIs(self.loads(b'cmath\nlog\n.'), math.log) + self.assertIs(self.loads(b'cos.path\njoin\n.'), os.path.join) + self.assertIs(self.loads(b'\x80\x04cbuiltins\nstr.upper\n.'), str.upper) + with support.swap_item(sys.modules, 'mödule', types.SimpleNamespace(glöbal=42)): + self.assertEqual(self.loads(b'\x80\x04cm\xc3\xb6dule\ngl\xc3\xb6bal\n.'), 42) + + self.assertRaises(UnicodeDecodeError, self.loads, b'c\xff\nlog\n.') + self.assertRaises(UnicodeDecodeError, self.loads, b'cmath\n\xff\n.') + self.assertRaises(self.truncated_errors, self.loads, b'c\nlog\n.') + self.assertRaises(self.truncated_errors, self.loads, b'cmath\n\n.') + self.assertRaises(self.truncated_errors, self.loads, b'\x80\x04cmath\n\n.') + + def test_load_stack_global(self): + self.assertIs(self.loads(b'\x8c\x08builtins\x8c\x03str\x93.'), str) + self.assertIs(self.loads(b'\x8c\x04math\x8c\x03log\x93.'), math.log) + self.assertIs(self.loads(b'\x8c\x07os.path\x8c\x04join\x93.'), + os.path.join) + self.assertIs(self.loads(b'\x80\x04\x8c\x08builtins\x8c\x09str.upper\x93.'), + str.upper) + with support.swap_item(sys.modules, 'mödule', types.SimpleNamespace(glöbal=42)): + self.assertEqual(self.loads(b'\x80\x04\x8c\x07m\xc3\xb6dule\x8c\x07gl\xc3\xb6bal\x93.'), 42) + + self.assertRaises(UnicodeDecodeError, self.loads, b'\x8c\x01\xff\x8c\x03log\x93.') + self.assertRaises(UnicodeDecodeError, self.loads, b'\x8c\x04math\x8c\x01\xff\x93.') + self.assertRaises(ValueError, self.loads, b'\x8c\x00\x8c\x03log\x93.') + self.assertRaises(AttributeError, self.loads, b'\x8c\x04math\x8c\x00\x93.') + self.assertRaises(AttributeError, self.loads, b'\x80\x04\x8c\x04math\x8c\x00\x93.') + + self.assertRaises(pickle.UnpicklingError, self.loads, b'N\x8c\x03log\x93.') + self.assertRaises(pickle.UnpicklingError, self.loads, b'\x8c\x04mathN\x93.') + self.assertRaises(pickle.UnpicklingError, self.loads, b'\x80\x04\x8c\x04mathN\x93.') + + def test_find_class(self): + unpickler = self.unpickler(io.BytesIO()) + unpickler_nofix = self.unpickler(io.BytesIO(), fix_imports=False) + unpickler4 = self.unpickler(io.BytesIO(b'\x80\x04N.')) + unpickler4.load() + + self.assertIs(unpickler.find_class('__builtin__', 'str'), str) + self.assertRaises(ModuleNotFoundError, + unpickler_nofix.find_class, '__builtin__', 'str') + self.assertIs(unpickler.find_class('builtins', 'str'), str) + self.assertIs(unpickler_nofix.find_class('builtins', 'str'), str) + self.assertIs(unpickler.find_class('math', 'log'), math.log) + self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join) + self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join) + + self.assertIs(unpickler4.find_class('builtins', 'str.upper'), str.upper) + with self.assertRaises(AttributeError): + unpickler.find_class('builtins', 'str.upper') + + with self.assertRaises(AttributeError): + unpickler.find_class('math', 'spam') + with self.assertRaises(AttributeError): + unpickler4.find_class('math', 'spam') + with 
self.assertRaises(AttributeError): + unpickler.find_class('math', 'log.spam') + with self.assertRaises(AttributeError): + unpickler4.find_class('math', 'log.spam') + with self.assertRaises(AttributeError): + unpickler.find_class('math', 'log..spam') + with self.assertRaises(AttributeError): + unpickler4.find_class('math', 'log..spam') + with self.assertRaises(AttributeError): + unpickler.find_class('math', '') + with self.assertRaises(AttributeError): + unpickler4.find_class('math', '') + self.assertRaises(ModuleNotFoundError, unpickler.find_class, 'spam', 'log') + self.assertRaises(ValueError, unpickler.find_class, '', 'log') + + self.assertRaises(TypeError, unpickler.find_class, None, 'log') + self.assertRaises(TypeError, unpickler.find_class, 'math', None) + self.assertRaises((TypeError, AttributeError), unpickler4.find_class, 'math', None) + + def test_custom_find_class(self): + def loads(data): + class Unpickler(self.unpickler): + def find_class(self, module_name, global_name): + return (module_name, global_name) + return Unpickler(io.BytesIO(data)).load() + + self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) + self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) + + def loads(data): + class Unpickler(self.unpickler): + @staticmethod + def find_class(module_name, global_name): + return (module_name, global_name) + return Unpickler(io.BytesIO(data)).load() + + self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) + self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) + + def loads(data): + class Unpickler(self.unpickler): + @classmethod + def find_class(cls, module_name, global_name): + return (module_name, global_name) + return Unpickler(io.BytesIO(data)).load() + + self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) + self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) + + def loads(data): + class Unpickler(self.unpickler): + pass + def find_class(module_name, global_name): + return (module_name, global_name) + unpickler = Unpickler(io.BytesIO(data)) + unpickler.find_class = find_class + return unpickler.load() + + self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log')) + self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log')) + def test_bad_reduce(self): self.assertEqual(self.loads(b'cbuiltins\nint\n)R.'), 0) self.check_unpickling_error(TypeError, b'N)R.') @@ -1443,6 +1561,474 @@ def t(): [ToBeUnpickled] * 2) +class AbstractPicklingErrorTests: + # Subclass must define self.dumps, self.pickler. 
+ + def test_bad_reduce_result(self): + obj = REX([print, ()]) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj = REX((print,)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj = REX((print, (), None, None, None, None, None)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_bad_reconstructor(self): + obj = REX((42, ())) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_reconstructor(self): + obj = REX((UnpickleableCallable(), ())) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_reconstructor_args(self): + obj = REX((print, [])) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_reconstructor_args(self): + obj = REX((print, (1, 2, UNPICKLEABLE))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_newobj_args(self): + obj = REX((copyreg.__newobj__, ())) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((IndexError, pickle.PicklingError)) as cm: + self.dumps(obj, proto) + + obj = REX((copyreg.__newobj__, [REX])) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((IndexError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_bad_newobj_class(self): + obj = REX((copyreg.__newobj__, (NoNew(),))) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_wrong_newobj_class(self): + obj = REX((copyreg.__newobj__, (str,))) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_newobj_class(self): + class LocalREX(REX): pass + obj = LocalREX((copyreg.__newobj__, (LocalREX,))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((pickle.PicklingError, AttributeError)): + self.dumps(obj, proto) + + def test_unpickleable_newobj_args(self): + obj = REX((copyreg.__newobj__, (REX, 1, 2, UNPICKLEABLE))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_newobj_ex_args(self): + obj = REX((copyreg.__newobj_ex__, ())) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((ValueError, pickle.PicklingError)): + self.dumps(obj, proto) + + obj = REX((copyreg.__newobj_ex__, 42)) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj = REX((copyreg.__newobj_ex__, (REX, 42, {}))) + is_py = self.pickler is pickle._Pickler + for proto in protocols[2:4] if is_py else protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + obj = REX((copyreg.__newobj_ex__, (REX, (), []))) + for proto in protocols[2:4] if is_py else protocols[2:]: + with 
self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_bad_newobj_ex__class(self): + obj = REX((copyreg.__newobj_ex__, (NoNew(), (), {}))) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_wrong_newobj_ex_class(self): + if self.pickler is not pickle._Pickler: + self.skipTest('only verified in the Python implementation') + obj = REX((copyreg.__newobj_ex__, (str, (), {}))) + for proto in protocols[2:]: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_newobj_ex_class(self): + class LocalREX(REX): pass + obj = LocalREX((copyreg.__newobj_ex__, (LocalREX, (), {}))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((pickle.PicklingError, AttributeError)): + self.dumps(obj, proto) + + def test_unpickleable_newobj_ex_args(self): + obj = REX((copyreg.__newobj_ex__, (REX, (1, 2, UNPICKLEABLE), {}))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_newobj_ex_kwargs(self): + obj = REX((copyreg.__newobj_ex__, (REX, (), {'a': UNPICKLEABLE}))) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_state(self): + obj = REX_state(UNPICKLEABLE) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_state_setter(self): + if self.pickler is pickle._Pickler: + self.skipTest('only verified in the C implementation') + obj = REX((print, (), 'state', None, None, 42)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_unpickleable_state_setter(self): + obj = REX((print, (), 'state', None, None, UnpickleableCallable())) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_state_with_state_setter(self): + obj = REX((print, (), UNPICKLEABLE, None, None, print)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_object_list_items(self): + # Issue4176: crash when 4th and 5th items of __reduce__() + # are not iterators + obj = REX((list, (), None, 42)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + if self.pickler is not pickle._Pickler: + # Python implementation is less strict and also accepts iterables. 
+ obj = REX((list, (), None, [])) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_unpickleable_object_list_items(self): + obj = REX_six([1, 2, UNPICKLEABLE]) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_bad_object_dict_items(self): + # Issue4176: crash when 4th and 5th items of __reduce__() + # are not iterators + obj = REX((dict, (), None, None, 42)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + for proto in protocols: + obj = REX((dict, (), None, None, iter([('a',)]))) + with self.subTest(proto=proto): + with self.assertRaises((ValueError, TypeError)): + self.dumps(obj, proto) + + if self.pickler is not pickle._Pickler: + # Python implementation is less strict and also accepts iterables. + obj = REX((dict, (), None, None, [])) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((TypeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_unpickleable_object_dict_items(self): + obj = REX_seven({'a': UNPICKLEABLE}) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_list_items(self): + obj = [1, [2, 3, UNPICKLEABLE]] + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + for n in [0, 1, 1000, 1005]: + obj = [*range(n), UNPICKLEABLE] + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_tuple_items(self): + obj = (1, (2, 3, UNPICKLEABLE)) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + obj = (*range(10), UNPICKLEABLE) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_dict_items(self): + obj = {'a': {'b': UNPICKLEABLE}} + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + for n in [0, 1, 1000, 1005]: + obj = dict.fromkeys(range(n)) + obj['a'] = UNPICKLEABLE + for proto in protocols: + with self.subTest(proto=proto, n=n): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_set_items(self): + obj = {UNPICKLEABLE} + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_unpickleable_frozenset_items(self): + obj = frozenset({frozenset({UNPICKLEABLE})}) + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(CustomError): + self.dumps(obj, proto) + + def test_global_lookup_error(self): + # Global name does not exist + obj = REX('spam') + obj.__module__ = __name__ + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj.__module__ = 'nonexisting' + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj.__module__ = '' + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((ValueError, 
pickle.PicklingError)): + self.dumps(obj, proto) + + obj.__module__ = None + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_nonencodable_global_name_error(self): + for proto in protocols[:4]: + with self.subTest(proto=proto): + name = 'nonascii\xff' if proto < 3 else 'nonencodable\udbff' + obj = REX(name) + obj.__module__ = __name__ + with support.swap_item(globals(), name, obj): + with self.assertRaises((UnicodeEncodeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_nonencodable_module_name_error(self): + for proto in protocols[:4]: + with self.subTest(proto=proto): + name = 'nonascii\xff' if proto < 3 else 'nonencodable\udbff' + obj = REX('test') + obj.__module__ = name + mod = types.SimpleNamespace(test=obj) + with support.swap_item(sys.modules, name, mod): + with self.assertRaises((UnicodeEncodeError, pickle.PicklingError)): + self.dumps(obj, proto) + + def test_nested_lookup_error(self): + # Nested name does not exist + obj = REX('AbstractPickleTests.spam') + obj.__module__ = __name__ + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj.__module__ = None + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_wrong_object_lookup_error(self): + # Name is bound to different object + obj = REX('AbstractPickleTests') + obj.__module__ = __name__ + AbstractPickleTests.ham = [] + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + obj.__module__ = None + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises(pickle.PicklingError): + self.dumps(obj, proto) + + def test_local_lookup_error(self): + # Test that whichmodule() errors out cleanly when looking up + # an assumed globally-reachable object fails. + def f(): + pass + # Since the function is local, lookup will fail + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((AttributeError, pickle.PicklingError)): + self.dumps(f, proto) + # Same without a __module__ attribute (exercises a different path + # in _pickle.c). + del f.__module__ + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((AttributeError, pickle.PicklingError)): + self.dumps(f, proto) + # Yet a different path. 
+ f.__name__ = f.__qualname__ + for proto in protocols: + with self.subTest(proto=proto): + with self.assertRaises((AttributeError, pickle.PicklingError)): + self.dumps(f, proto) + + def test_reduce_ex_None(self): + c = REX_None() + with self.assertRaises(TypeError): + self.dumps(c) + + def test_reduce_None(self): + c = R_None() + with self.assertRaises(TypeError): + self.dumps(c) + + @no_tracing + def test_bad_getattr(self): + # Issue #3514: crash when there is an infinite loop in __getattr__ + x = BadGetattr() + for proto in range(2): + with support.infinite_recursion(25): + self.assertRaises(RuntimeError, self.dumps, x, proto) + for proto in range(2, pickle.HIGHEST_PROTOCOL + 1): + s = self.dumps(x, proto) + + def test_picklebuffer_error(self): + # PickleBuffer forbidden with protocol < 5 + pb = pickle.PickleBuffer(b"foobar") + for proto in range(0, 5): + with self.subTest(proto=proto): + with self.assertRaises(pickle.PickleError): + self.dumps(pb, proto) + + def test_non_continuous_buffer(self): + if self.pickler is pickle._Pickler: + self.skipTest('CRASHES (see gh-122306)') + for proto in protocols[5:]: + with self.subTest(proto=proto): + pb = pickle.PickleBuffer(memoryview(b"foobar")[::2]) + with self.assertRaises(pickle.PicklingError): + self.dumps(pb, proto) + + def test_buffer_callback_error(self): + def buffer_callback(buffers): + raise CustomError + pb = pickle.PickleBuffer(b"foobar") + with self.assertRaises(CustomError): + self.dumps(pb, 5, buffer_callback=buffer_callback) + + def test_evil_pickler_mutating_collection(self): + # https://github.com/python/cpython/issues/92930 + global Clearer + class Clearer: + pass + + def check(collection): + class EvilPickler(self.pickler): + def persistent_id(self, obj): + if isinstance(obj, Clearer): + collection.clear() + return None + pickler = EvilPickler(io.BytesIO(), proto) + try: + pickler.dump(collection) + except RuntimeError as e: + expected = "changed size during iteration" + self.assertIn(expected, str(e)) + + for proto in protocols: + check([Clearer()]) + check([Clearer(), Clearer()]) + check({Clearer()}) + check({Clearer(), Clearer()}) + check({Clearer(): 1}) + check({Clearer(): 1, Clearer(): 2}) + check({1: Clearer(), 2: Clearer()}) + class AbstractPickleTests: # Subclass must define self.dumps, self.loads. @@ -2453,55 +3039,12 @@ def test_reduce_calls_base(self): y = self.loads(s) self.assertEqual(y._reduce_called, 1) - def test_reduce_ex_None(self): - c = REX_None() - with self.assertRaises(TypeError): - self.dumps(c) - - def test_reduce_None(self): - c = R_None() - with self.assertRaises(TypeError): - self.dumps(c) - def test_pickle_setstate_None(self): c = C_None_setstate() p = self.dumps(c) with self.assertRaises(TypeError): self.loads(p) - @no_tracing - def test_bad_getattr(self): - # Issue #3514: crash when there is an infinite loop in __getattr__ - x = BadGetattr() - for proto in range(2): - with support.infinite_recursion(25): - self.assertRaises(RuntimeError, self.dumps, x, proto) - for proto in range(2, pickle.HIGHEST_PROTOCOL + 1): - s = self.dumps(x, proto) - - def test_reduce_bad_iterator(self): - # Issue4176: crash when 4th and 5th items of __reduce__() - # are not iterators - class C(object): - def __reduce__(self): - # 4th item is not an iterator - return list, (), None, [], None - class D(object): - def __reduce__(self): - # 5th item is not an iterator - return dict, (), None, None, [] - - # Python implementation is less strict and also accepts iterables. 
- for proto in protocols: - try: - self.dumps(C(), proto) - except pickle.PicklingError: - pass - try: - self.dumps(D(), proto) - except pickle.PicklingError: - pass - def test_many_puts_and_gets(self): # Test that internal data structures correctly deal with lots of # puts/gets. @@ -2950,27 +3493,6 @@ def test_compat_pickle(self): self.assertIn(('c%s\n%s' % (mod, name)).encode(), pickled) self.assertIs(type(self.loads(pickled)), type(val)) - def test_local_lookup_error(self): - # Test that whichmodule() errors out cleanly when looking up - # an assumed globally-reachable object fails. - def f(): - pass - # Since the function is local, lookup will fail - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - with self.assertRaises((AttributeError, pickle.PicklingError)): - pickletools.dis(self.dumps(f, proto)) - # Same without a __module__ attribute (exercises a different path - # in _pickle.c). - del f.__module__ - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - with self.assertRaises((AttributeError, pickle.PicklingError)): - pickletools.dis(self.dumps(f, proto)) - # Yet a different path. - f.__name__ = f.__qualname__ - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - with self.assertRaises((AttributeError, pickle.PicklingError)): - pickletools.dis(self.dumps(f, proto)) - # # PEP 574 tests below # @@ -3081,20 +3603,6 @@ def test_oob_buffers_writable_to_readonly(self): self.assertIs(type(new), type(obj)) self.assertEqual(new, obj) - def test_picklebuffer_error(self): - # PickleBuffer forbidden with protocol < 5 - pb = pickle.PickleBuffer(b"foobar") - for proto in range(0, 5): - with self.assertRaises(pickle.PickleError): - self.dumps(pb, proto) - - def test_buffer_callback_error(self): - def buffer_callback(buffers): - 1/0 - pb = pickle.PickleBuffer(b"foobar") - with self.assertRaises(ZeroDivisionError): - self.dumps(pb, 5, buffer_callback=buffer_callback) - def test_buffers_error(self): pb = pickle.PickleBuffer(b"foobar") for proto in range(5, pickle.HIGHEST_PROTOCOL + 1): @@ -3186,37 +3694,6 @@ def __reduce__(self): expected = "changed size during iteration" self.assertIn(expected, str(e)) - def test_evil_pickler_mutating_collection(self): - # https://github.com/python/cpython/issues/92930 - if not hasattr(self, "pickler"): - raise self.skipTest(f"{type(self)} has no associated pickler type") - - global Clearer - class Clearer: - pass - - def check(collection): - class EvilPickler(self.pickler): - def persistent_id(self, obj): - if isinstance(obj, Clearer): - collection.clear() - return None - pickler = EvilPickler(io.BytesIO(), proto) - try: - pickler.dump(collection) - except RuntimeError as e: - expected = "changed size during iteration" - self.assertIn(expected, str(e)) - - for proto in protocols: - check([Clearer()]) - check([Clearer(), Clearer()]) - check({Clearer()}) - check({Clearer(), Clearer()}) - check({Clearer(): 1}) - check({Clearer(): 1, Clearer(): 2}) - check({1: Clearer(), 2: Clearer()}) - class BigmemPickleTests: @@ -3347,6 +3824,18 @@ def test_huge_str_64b(self, size): # Test classes for reduce_ex +class R: + def __init__(self, reduce=None): + self.reduce = reduce + def __reduce__(self, proto): + return self.reduce + +class REX: + def __init__(self, reduce_ex=None): + self.reduce_ex = reduce_ex + def __reduce_ex__(self, proto): + return self.reduce_ex + class REX_one(object): """No __reduce_ex__ here, but inheriting it from object""" _reduce_called = 0 @@ -3437,6 +3926,19 @@ def __getstate__(self): __setstate__ = None +class CustomError(Exception): + 
pass + +class Unpickleable: + def __reduce__(self): + raise CustomError + +UNPICKLEABLE = Unpickleable() + +class UnpickleableCallable(Unpickleable): + def __call__(self, *args, **kwargs): + pass + # Test classes for newobj @@ -3505,6 +4007,12 @@ class BadGetattr: def __getattr__(self, key): self.foo +class NoNew: + def __getattribute__(self, name): + if name == '__new__': + raise AttributeError + return super().__getattribute__(name) + class AbstractPickleModuleTests: @@ -3577,7 +4085,7 @@ def raises_oserror(self, *args, **kwargs): raise OSError @property def bad_property(self): - 1/0 + raise CustomError # File without read and readline class F: @@ -3598,23 +4106,23 @@ class F: class F: read = bad_property readline = raises_oserror - self.assertRaises(ZeroDivisionError, self.Unpickler, F()) + self.assertRaises(CustomError, self.Unpickler, F()) # File with bad readline class F: readline = bad_property read = raises_oserror - self.assertRaises(ZeroDivisionError, self.Unpickler, F()) + self.assertRaises(CustomError, self.Unpickler, F()) # File with bad readline, no read class F: readline = bad_property - self.assertRaises(ZeroDivisionError, self.Unpickler, F()) + self.assertRaises(CustomError, self.Unpickler, F()) # File with bad read, no readline class F: read = bad_property - self.assertRaises((AttributeError, ZeroDivisionError), self.Unpickler, F()) + self.assertRaises((AttributeError, CustomError), self.Unpickler, F()) # File with bad peek class F: @@ -3623,7 +4131,7 @@ class F: readline = raises_oserror try: self.Unpickler(F()) - except ZeroDivisionError: + except CustomError: pass # File with bad readinto @@ -3633,7 +4141,7 @@ class F: readline = raises_oserror try: self.Unpickler(F()) - except ZeroDivisionError: + except CustomError: pass def test_pickler_bad_file(self): @@ -3646,8 +4154,8 @@ class F: class F: @property def write(self): - 1/0 - self.assertRaises(ZeroDivisionError, self.Pickler, F()) + raise CustomError + self.assertRaises(CustomError, self.Pickler, F()) def check_dumps_loads_oob_buffers(self, dumps, loads): # No need to do the full gamut of tests here, just enough to @@ -3755,9 +4263,15 @@ def test_return_correct_type(self): def test_protocol0_is_ascii_only(self): non_ascii_str = "\N{EMPTY SET}" - self.assertRaises(pickle.PicklingError, self.dumps, non_ascii_str, 0) + with self.assertRaises(pickle.PicklingError) as cm: + self.dumps(non_ascii_str, 0) + self.assertEqual(str(cm.exception), + 'persistent IDs in protocol 0 must be ASCII strings') pickled = pickle.PERSID + non_ascii_str.encode('utf-8') + b'\n.' 
- self.assertRaises(pickle.UnpicklingError, self.loads, pickled) + with self.assertRaises(pickle.UnpicklingError) as cm: + self.loads(pickled) + self.assertEqual(str(cm.exception), + 'persistent IDs in protocol 0 must be ASCII strings') class AbstractPicklerUnpicklerObjectTests: diff --git a/Lib/test/test_pickle.py b/Lib/test/test_pickle.py index 49aa4b386039ec..c84e507cdf645f 100644 --- a/Lib/test/test_pickle.py +++ b/Lib/test/test_pickle.py @@ -16,6 +16,7 @@ from test.pickletester import AbstractHookTests from test.pickletester import AbstractUnpickleTests +from test.pickletester import AbstractPicklingErrorTests from test.pickletester import AbstractPickleTests from test.pickletester import AbstractPickleModuleTests from test.pickletester import AbstractPersistentPicklerTests @@ -55,6 +56,18 @@ def loads(self, buf, **kwds): return u.load() +class PyPicklingErrorTests(AbstractPicklingErrorTests, unittest.TestCase): + + pickler = pickle._Pickler + + def dumps(self, arg, proto=None, **kwargs): + f = io.BytesIO() + p = self.pickler(f, proto, **kwargs) + p.dump(arg) + f.seek(0) + return bytes(f.read()) + + class PyPicklerTests(AbstractPickleTests, unittest.TestCase): pickler = pickle._Pickler @@ -88,6 +101,8 @@ def loads(self, buf, **kwds): return pickle.loads(buf, **kwds) test_framed_write_sizes_with_delayed_writer = None + test_find_class = None + test_custom_find_class = None class PersistentPicklerUnpicklerMixin(object): @@ -267,6 +282,9 @@ class CUnpicklerTests(PyUnpicklerTests): bad_stack_errors = (pickle.UnpicklingError,) truncated_errors = (pickle.UnpicklingError,) + class CPicklingErrorTests(PyPicklingErrorTests): + pickler = _pickle.Pickler + class CPicklerTests(PyPicklerTests): pickler = _pickle.Pickler unpickler = _pickle.Unpickler From 169e7138ab84db465b6bf28e6c1dc6c39dbf89f4 Mon Sep 17 00:00:00 2001 From: Sergey B Kirpichev Date: Mon, 29 Jul 2024 06:56:40 +0300 Subject: [PATCH 06/70] gh-122234: fix accuracy issues for sum() (#122236) * Use compensated summation for complex sums with floating-point items. This amends #121176. * sum() specializations for floats and complexes now use PyLong_AsDouble() instead of PyLong_AsLongAndOverflow() and compensated summation as well. 
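
Note on the technique referenced above: compensated (Neumaier) summation keeps a separate
correction term that captures the low-order digits lost whenever values of very different
magnitude are added. The sketch below is a minimal, illustrative Python version of that
general algorithm only — it is not the C code in Python/bltinmodule.c (which reuses the
existing cs_add()/CompensatedSum helpers shown in the diff), and the name neumaier_sum is
invented for the example:

    def neumaier_sum(values):
        # Running total plus a separate compensation term that accumulates
        # the low-order digits lost by each floating-point addition.
        total = 0.0
        compensation = 0.0
        for x in values:
            t = total + x
            if abs(total) >= abs(x):
                # x's low-order digits were lost in total + x; recover them.
                compensation += (total - t) + x
            else:
                # total's low-order digits were lost instead.
                compensation += (x - t) + total
            total = t
        # Apply the accumulated correction once at the end.
        return total + compensation

    # Naive left-to-right addition of [1.0, 1e100, 1.0, -1e100] gives 0.0,
    # while neumaier_sum() gives 2.0 -- the behaviour the new assertions in
    # Lib/test/test_builtin.py check, e.g. sum([1.0, 10**100, 1.0, -10**100]) == 2.0.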
--- Lib/test/test_builtin.py | 5 ++++ ...-07-24-17-11-51.gh-issue-122234.VxsP_F.rst | 4 ++++ Python/bltinmodule.c | 24 ++++++++++--------- 3 files changed, 22 insertions(+), 11 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-24-17-11-51.gh-issue-122234.VxsP_F.rst diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index c6a563cc90fec4..85f139db9bcd45 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1778,6 +1778,8 @@ def test_sum(self): self.assertRaises(TypeError, sum, [], '') self.assertRaises(TypeError, sum, [], b'') self.assertRaises(TypeError, sum, [], bytearray()) + self.assertRaises(OverflowError, sum, [1.0, 10**1000]) + self.assertRaises(OverflowError, sum, [1j, 10**1000]) class BadSeq: def __getitem__(self, index): @@ -1803,6 +1805,9 @@ def test_sum_accuracy(self): self.assertEqual(sum([1.0, 10E100, 1.0, -10E100, 2j]), 2+2j) self.assertEqual(sum([2+1j, 10E100j, 1j, -10E100j]), 2+2j) self.assertEqual(sum([1j, 1, 10E100j, 1j, 1.0, -10E100j]), 2+2j) + self.assertEqual(sum([2j, 1., 10E100, 1., -10E100]), 2+2j) + self.assertEqual(sum([1.0, 10**100, 1.0, -10**100]), 2.0) + self.assertEqual(sum([2j, 1.0, 10**100, 1.0, -10**100]), 2+2j) self.assertEqual(sum([0.1j]*10 + [fractions.Fraction(1, 10)]), 0.1+1j) def test_type(self): diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-24-17-11-51.gh-issue-122234.VxsP_F.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-24-17-11-51.gh-issue-122234.VxsP_F.rst new file mode 100644 index 00000000000000..b86d6fbdfc648f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-24-17-11-51.gh-issue-122234.VxsP_F.rst @@ -0,0 +1,4 @@ +Specializations for sums with float and complex inputs in :func:`sum()` now +always use compensated summation. Also, for integer items in above +specializations: :c:func:`PyLong_AsDouble` is used, instead of +:c:func:`PyLong_AsLongAndOverflow`. Patch by Sergey B Kirpichev. 
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 3f7bf4d568ee46..ae025e767ec838 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2687,14 +2687,15 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) continue; } if (PyLong_Check(item)) { - long value; - int overflow; - value = PyLong_AsLongAndOverflow(item, &overflow); - if (!overflow) { - re_sum.hi += (double)value; + double value = PyLong_AsDouble(item); + if (value != -1.0 || !PyErr_Occurred()) { + re_sum = cs_add(re_sum, value); Py_DECREF(item); continue; } + else { + return NULL; + } } result = PyFloat_FromDouble(cs_to_double(re_sum)); if (result == NULL) { @@ -2736,19 +2737,20 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) continue; } if (PyLong_Check(item)) { - long value; - int overflow; - value = PyLong_AsLongAndOverflow(item, &overflow); - if (!overflow) { - re_sum.hi += (double)value; + double value = PyLong_AsDouble(item); + if (value != -1.0 || !PyErr_Occurred()) { + re_sum = cs_add(re_sum, value); im_sum.hi += 0.0; Py_DECREF(item); continue; } + else { + return NULL; + } } if (PyFloat_Check(item)) { double value = PyFloat_AS_DOUBLE(item); - re_sum.hi += value; + re_sum = cs_add(re_sum, value); im_sum.hi += 0.0; _Py_DECREF_SPECIALIZED(item, _PyFloat_ExactDealloc); continue; From 3b034d26eb8480f8d12ae11f42d038d24cf8498a Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 29 Jul 2024 11:49:13 +0300 Subject: [PATCH 07/70] gh-122311: Fix some error messages in pickle (GH-122386) --- Lib/pickle.py | 7 ++++--- Lib/test/pickletester.py | 4 +++- .../Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst | 1 + Modules/_pickle.c | 6 +++--- 4 files changed, 11 insertions(+), 7 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst diff --git a/Lib/pickle.py b/Lib/pickle.py index c6c151b2065f4d..299c9e0e5e5641 100644 --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -314,16 +314,17 @@ def load_frame(self, frame_size): # Tools used for pickling. 
def _getattribute(obj, name): + top = obj for subpath in name.split('.'): if subpath == '': raise AttributeError("Can't get local attribute {!r} on {!r}" - .format(name, obj)) + .format(name, top)) try: parent = obj obj = getattr(obj, subpath) except AttributeError: raise AttributeError("Can't get attribute {!r} on {!r}" - .format(name, obj)) from None + .format(name, top)) from None return obj, parent def whichmodule(obj, name): @@ -832,7 +833,7 @@ def save_bytearray(self, obj): if _HAVE_PICKLE_BUFFER: def save_picklebuffer(self, obj): if self.proto < 5: - raise PicklingError("PickleBuffer can only pickled with " + raise PicklingError("PickleBuffer can only be pickled with " "protocol >= 5") with obj.raw() as m: if not m.contiguous: diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 174f4ff6d021b2..a2b49e6c92a7b3 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1982,8 +1982,10 @@ def test_picklebuffer_error(self): pb = pickle.PickleBuffer(b"foobar") for proto in range(0, 5): with self.subTest(proto=proto): - with self.assertRaises(pickle.PickleError): + with self.assertRaises(pickle.PickleError) as cm: self.dumps(pb, proto) + self.assertEqual(str(cm.exception), + 'PickleBuffer can only be pickled with protocol >= 5') def test_non_continuous_buffer(self): if self.pickler is pickle._Pickler: diff --git a/Misc/NEWS.d/next/Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst b/Misc/NEWS.d/next/Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst new file mode 100644 index 00000000000000..8d70c610a8dad6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst @@ -0,0 +1 @@ +Fix some error messages in :mod:`pickle`. diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 861363b68c20c5..452b4aff0237ca 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1817,10 +1817,10 @@ get_dotted_path(PyObject *obj, PyObject *name) if (_PyUnicode_EqualToASCIIString(subpath, "")) { if (obj == NULL) PyErr_Format(PyExc_AttributeError, - "Can't pickle local object %R", name); + "Can't get local object %R", name); else PyErr_Format(PyExc_AttributeError, - "Can't pickle local attribute %R on %R", name, obj); + "Can't get local attribute %R on %R", name, obj); Py_DECREF(dotted_path); return NULL; } @@ -2507,7 +2507,7 @@ save_picklebuffer(PickleState *st, PicklerObject *self, PyObject *obj) { if (self->proto < 5) { PyErr_SetString(st->PicklingError, - "PickleBuffer can only pickled with protocol >= 5"); + "PickleBuffer can only be pickled with protocol >= 5"); return -1; } const Py_buffer* view = PyPickleBuffer_GetBuffer(obj); From 0697188084bf61b28f258fbbe867e1010d679b3e Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 29 Jul 2024 13:40:16 +0300 Subject: [PATCH 08/70] gh-122311: Add more tests for error messages in pickle (GH-122373) --- Lib/test/pickletester.py | 235 ++++++++++++++++++++++++++++++--------- 1 file changed, 185 insertions(+), 50 deletions(-) diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index a2b49e6c92a7b3..3c936b3bc4029e 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1229,24 +1229,38 @@ def test_find_class(self): self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join) self.assertIs(unpickler4.find_class('builtins', 'str.upper'), str.upper) - with self.assertRaises(AttributeError): + with self.assertRaisesRegex(AttributeError, + r"module 'builtins' has no attribute 'str\.upper'|" + r"Can't get attribute 'str\.upper' on \.spam'|" + r"Can't get 
attribute 'log\.\.spam' on .spam') - with self.assertRaises(AttributeError): + with self.assertRaisesRegex(AttributeError, + r"Can't get local attribute 'log\.\.spam' on .spam') - with self.assertRaises(AttributeError): + with self.assertRaisesRegex(AttributeError, + "module 'math' has no attribute ''|" + "Can't get attribute '' on )' + r' must return (a )?string or tuple') with self.assertRaisesRegex( ValueError, 'The reducer just failed'): From 9187484dd97f6beb94fc17676014706922e380e1 Mon Sep 17 00:00:00 2001 From: Kirill Podoprigora Date: Mon, 29 Jul 2024 13:59:42 +0300 Subject: [PATCH 09/70] gh-122292: Split up ``Lib/test/test_ast.py`` into a couple of files (#122293) --- Lib/test/test_ast/__init__.py | 7 + Lib/test/test_ast/snippets.py | 602 +++++++++++++++++++++++++++ Lib/test/{ => test_ast}/test_ast.py | 616 +--------------------------- Lib/test/test_ast/utils.py | 15 + Makefile.pre.in | 1 + 5 files changed, 629 insertions(+), 612 deletions(-) create mode 100644 Lib/test/test_ast/__init__.py create mode 100644 Lib/test/test_ast/snippets.py rename Lib/test/{ => test_ast}/test_ast.py (68%) create mode 100644 Lib/test/test_ast/utils.py diff --git a/Lib/test/test_ast/__init__.py b/Lib/test/test_ast/__init__.py new file mode 100644 index 00000000000000..9a89d27ba9f979 --- /dev/null +++ b/Lib/test/test_ast/__init__.py @@ -0,0 +1,7 @@ +import os + +from test import support + + +def load_tests(*args): + return support.load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_ast/snippets.py b/Lib/test/test_ast/snippets.py new file mode 100644 index 00000000000000..95dc3ca03cd38b --- /dev/null +++ b/Lib/test/test_ast/snippets.py @@ -0,0 +1,602 @@ +import ast +import sys + +from test.test_ast.utils import to_tuple + + +# These tests are compiled through "exec" +# There should be at least one test per statement +exec_tests = [ + # Module docstring + "'module docstring'", + # FunctionDef + "def f(): pass", + # FunctionDef with docstring + "def f(): 'function docstring'", + # FunctionDef with arg + "def f(a): pass", + # FunctionDef with arg and default value + "def f(a=0): pass", + # FunctionDef with varargs + "def f(*args): pass", + # FunctionDef with varargs as TypeVarTuple + "def f(*args: *Ts): pass", + # FunctionDef with varargs as unpacked Tuple + "def f(*args: *tuple[int, ...]): pass", + # FunctionDef with varargs as unpacked Tuple *and* TypeVarTuple + "def f(*args: *tuple[int, *Ts]): pass", + # FunctionDef with kwargs + "def f(**kwargs): pass", + # FunctionDef with all kind of args and docstring + "def f(a, b=1, c=None, d=[], e={}, *args, f=42, **kwargs): 'doc for f()'", + # FunctionDef with type annotation on return involving unpacking + "def f() -> tuple[*Ts]: pass", + "def f() -> tuple[int, *Ts]: pass", + "def f() -> tuple[int, *tuple[int, ...]]: pass", + # ClassDef + "class C:pass", + # ClassDef with docstring + "class C: 'docstring for class C'", + # ClassDef, new style class + "class C(object): pass", + # Classdef with multiple bases + "class C(A, B): pass", + # Return + "def f():return 1", + "def f():return", + # Delete + "del v", + # Assign + "v = 1", + "a,b = c", + "(a,b) = c", + "[a,b] = c", + "a[b] = c", + # AnnAssign with unpacked types + "x: tuple[*Ts]", + "x: tuple[int, *Ts]", + "x: tuple[int, *tuple[str, ...]]", + # AugAssign + "v += 1", + "v -= 1", + "v *= 1", + "v @= 1", + "v /= 1", + "v %= 1", + "v **= 1", + "v <<= 1", + "v >>= 1", + "v |= 1", + "v ^= 1", + "v &= 1", + "v //= 1", + # For + "for v in v:pass", + # For-Else + "for v in v:\n 
pass\nelse:\n pass", + # While + "while v:pass", + # While-Else + "while v:\n pass\nelse:\n pass", + # If-Elif-Else + "if v:pass", + "if a:\n pass\nelif b:\n pass", + "if a:\n pass\nelse:\n pass", + "if a:\n pass\nelif b:\n pass\nelse:\n pass", + "if a:\n pass\nelif b:\n pass\nelif b:\n pass\nelif b:\n pass\nelse:\n pass", + # With + "with x: pass", + "with x, y: pass", + "with x as y: pass", + "with x as y, z as q: pass", + "with (x as y): pass", + "with (x, y): pass", + # Raise + "raise", + "raise Exception('string')", + "raise Exception", + "raise Exception('string') from None", + # TryExcept + "try:\n pass\nexcept Exception:\n pass", + "try:\n pass\nexcept Exception as exc:\n pass", + # TryFinally + "try:\n pass\nfinally:\n pass", + # TryStarExcept + "try:\n pass\nexcept* Exception:\n pass", + "try:\n pass\nexcept* Exception as exc:\n pass", + # TryExceptFinallyElse + "try:\n pass\nexcept Exception:\n pass\nelse: pass\nfinally:\n pass", + "try:\n pass\nexcept Exception as exc:\n pass\nelse: pass\nfinally:\n pass", + "try:\n pass\nexcept* Exception as exc:\n pass\nelse: pass\nfinally:\n pass", + # Assert + "assert v", + # Assert with message + "assert v, 'message'", + # Import + "import sys", + "import foo as bar", + # ImportFrom + "from sys import x as y", + "from sys import v", + # Global + "global v", + # Expr + "1", + # Pass, + "pass", + # Break + "for v in v:break", + # Continue + "for v in v:continue", + # for statements with naked tuples (see http://bugs.python.org/issue6704) + "for a,b in c: pass", + "for (a,b) in c: pass", + "for [a,b] in c: pass", + # Multiline generator expression (test for .lineno & .col_offset) + """( + ( + Aa + , + Bb + ) + for + Aa + , + Bb in Cc + )""", + # dictcomp + "{a : b for w in x for m in p if g}", + # dictcomp with naked tuple + "{a : b for v,w in x}", + # setcomp + "{r for l in x if g}", + # setcomp with naked tuple + "{r for l,m in x}", + # AsyncFunctionDef + "async def f():\n 'async function'\n await something()", + # AsyncFor + "async def f():\n async for e in i: 1\n else: 2", + # AsyncWith + "async def f():\n async with a as b: 1", + # PEP 448: Additional Unpacking Generalizations + "{**{1:2}, 2:3}", + "{*{1, 2}, 3}", + # Function with yield (from) + "def f(): yield 1", + "def f(): yield from []", + # Asynchronous comprehensions + "async def f():\n [i async for b in c]", + # Decorated FunctionDef + "@deco1\n@deco2()\n@deco3(1)\ndef f(): pass", + # Decorated AsyncFunctionDef + "@deco1\n@deco2()\n@deco3(1)\nasync def f(): pass", + # Decorated ClassDef + "@deco1\n@deco2()\n@deco3(1)\nclass C: pass", + # Decorator with generator argument + "@deco(a for a in b)\ndef f(): pass", + # Decorator with attribute + "@a.b.c\ndef f(): pass", + # Simple assignment expression + "(a := 1)", + # Assignment expression in if statement + "if a := foo(): pass", + # Assignment expression in while + "while a := foo(): pass", + # Positional-only arguments + "def f(a, /,): pass", + "def f(a, /, c, d, e): pass", + "def f(a, /, c, *, d, e): pass", + "def f(a, /, c, *, d, e, **kwargs): pass", + # Positional-only arguments with defaults + "def f(a=1, /,): pass", + "def f(a=1, /, b=2, c=4): pass", + "def f(a=1, /, b=2, *, c=4): pass", + "def f(a=1, /, b=2, *, c): pass", + "def f(a=1, /, b=2, *, c=4, **kwargs): pass", + "def f(a=1, /, b=2, *, c, **kwargs): pass", + # Type aliases + "type X = int", + "type X[T] = int", + "type X[T, *Ts, **P] = (T, Ts, P)", + "type X[T: int, *Ts, **P] = (T, Ts, P)", + "type X[T: (int, str), *Ts, **P] = (T, Ts, P)", + "type X[T: int = 1, *Ts 
= 2, **P =3] = (T, Ts, P)", + # Generic classes + "class X[T]: pass", + "class X[T, *Ts, **P]: pass", + "class X[T: int, *Ts, **P]: pass", + "class X[T: (int, str), *Ts, **P]: pass", + "class X[T: int = 1, *Ts = 2, **P = 3]: pass", + # Generic functions + "def f[T](): pass", + "def f[T, *Ts, **P](): pass", + "def f[T: int, *Ts, **P](): pass", + "def f[T: (int, str), *Ts, **P](): pass", + "def f[T: int = 1, *Ts = 2, **P = 3](): pass", + # Match + "match x:\n\tcase 1:\n\t\tpass", + # Match with _ + "match x:\n\tcase 1:\n\t\tpass\n\tcase _:\n\t\tpass", +] + +# These are compiled through "single" +# because of overlap with "eval", it just tests what +# can't be tested with "eval" +single_tests = [ + "1+2" +] + +# These are compiled through "eval" +# It should test all expressions +eval_tests = [ + # Constant(value=None) + "None", + # True + "True", + # False + "False", + # BoolOp + "a and b", + "a or b", + # BinOp + "a + b", + "a - b", + "a * b", + "a / b", + "a @ b", + "a // b", + "a ** b", + "a % b", + "a >> b", + "a << b", + "a ^ b", + "a | b", + "a & b", + # UnaryOp + "not v", + "+v", + "-v", + "~v", + # Lambda + "lambda:None", + # Dict + "{ 1:2 }", + # Empty dict + "{}", + # Set + "{None,}", + # Multiline dict (test for .lineno & .col_offset) + """{ + 1 + : + 2 + }""", + # Multiline list + """[ + 1 + , + 1 + ]""", + # Multiline tuple + """( + 1 + , + )""", + # Multiline set + """{ + 1 + , + 1 + }""", + # ListComp + "[a for b in c if d]", + # GeneratorExp + "(a for b in c if d)", + # SetComp + "{a for b in c if d}", + # DictComp + "{k: v for k, v in c if d}", + # Comprehensions with multiple for targets + "[(a,b) for a,b in c]", + "[(a,b) for (a,b) in c]", + "[(a,b) for [a,b] in c]", + "{(a,b) for a,b in c}", + "{(a,b) for (a,b) in c}", + "{(a,b) for [a,b] in c}", + "((a,b) for a,b in c)", + "((a,b) for (a,b) in c)", + "((a,b) for [a,b] in c)", + # Async comprehensions - async comprehensions can't work outside an asynchronous function + # + # Yield - yield expressions can't work outside a function + # + # Compare + "1 < 2 < 3", + "a == b", + "a <= b", + "a >= b", + "a != b", + "a is b", + "a is not b", + "a in b", + "a not in b", + # Call without argument + "f()", + # Call + "f(1,2,c=3,*d,**e)", + # Call with multi-character starred + "f(*[0, 1])", + # Call with a generator argument + "f(a for a in b)", + # Constant(value=int()) + "10", + # Complex num + "1j", + # Constant(value=str()) + "'string'", + # Attribute + "a.b", + # Subscript + "a[b:c]", + # Name + "v", + # List + "[1,2,3]", + # Empty list + "[]", + # Tuple + "1,2,3", + # Tuple + "(1,2,3)", + # Empty tuple + "()", + # Combination + "a.b.c.d(a.b[1:2])", + # Slice + "[5][1:]", + "[5][:1]", + "[5][::1]", + "[5][1:1:1]", + # IfExp + "foo() if x else bar()", + # JoinedStr and FormattedValue + "f'{a}'", + "f'{a:.2f}'", + "f'{a!r}'", + "f'foo({a})'", +] + + +def main(): + if __name__ != '__main__': + return + if sys.argv[1:] == ['-g']: + for statements, kind in ((exec_tests, "exec"), (single_tests, "single"), + (eval_tests, "eval")): + print(kind+"_results = [") + for statement in statements: + tree = ast.parse(statement, "?", kind) + print("%r," % (to_tuple(tree),)) + print("]") + print("main()") + raise SystemExit + unittest.main() + +#### EVERYTHING BELOW IS GENERATED BY python Lib/test/test_ast/snippets.py -g ##### +exec_results = [ +('Module', [('Expr', (1, 0, 1, 18), ('Constant', (1, 0, 1, 18), 'module docstring', None))], []), +('Module', [('FunctionDef', (1, 0, 1, 13), 'f', ('arguments', [], [], None, [], [], None, []), 
[('Pass', (1, 9, 1, 13))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 29), ('Constant', (1, 9, 1, 29), 'function docstring', None))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, []), [('Pass', (1, 10, 1, 14))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 0, None)]), [('Pass', (1, 12, 1, 16))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 11), 'args', None, None), [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 23), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 16), 'args', ('Starred', (1, 13, 1, 16), ('Name', (1, 14, 1, 16), 'Ts', ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 19, 1, 23))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Constant', (1, 25, 1, 28), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Starred', (1, 25, 1, 28), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 21), 'f', ('arguments', [], [], None, [], [], ('arg', (1, 8, 1, 14), 'kwargs', None, None), []), [('Pass', (1, 17, 1, 21))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 71), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None), ('arg', (1, 9, 1, 10), 'b', None, None), ('arg', (1, 14, 1, 15), 'c', None, None), ('arg', (1, 22, 1, 23), 'd', None, None), ('arg', (1, 28, 1, 29), 'e', None, None)], ('arg', (1, 35, 1, 39), 'args', None, None), [('arg', (1, 41, 1, 42), 'f', None, None)], [('Constant', (1, 43, 1, 45), 42, None)], ('arg', (1, 49, 1, 55), 'kwargs', None, None), [('Constant', (1, 11, 1, 12), 1, None), ('Constant', (1, 16, 1, 20), None, None), ('List', (1, 24, 1, 26), [], ('Load',)), ('Dict', (1, 30, 1, 32), [], [])]), [('Expr', (1, 58, 1, 71), ('Constant', (1, 58, 1, 71), 'doc for f()', None))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 27), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 23, 1, 27))], [], ('Subscript', (1, 11, 1, 21), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 20), [('Starred', (1, 17, 1, 20), ('Name', (1, 18, 1, 20), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 28, 1, 32))], [], ('Subscript', (1, 11, 1, 26), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 25), [('Name', 
(1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 25), ('Name', (1, 23, 1, 25), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 45), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 41, 1, 45))], [], ('Subscript', (1, 11, 1, 39), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 38), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 38), ('Subscript', (1, 23, 1, 38), ('Name', (1, 23, 1, 28), 'tuple', ('Load',)), ('Tuple', (1, 29, 1, 37), [('Name', (1, 29, 1, 32), 'int', ('Load',)), ('Constant', (1, 34, 1, 37), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), +('Module', [('ClassDef', (1, 0, 1, 12), 'C', [], [], [('Pass', (1, 8, 1, 12))], [], [])], []), +('Module', [('ClassDef', (1, 0, 1, 32), 'C', [], [], [('Expr', (1, 9, 1, 32), ('Constant', (1, 9, 1, 32), 'docstring for class C', None))], [], [])], []), +('Module', [('ClassDef', (1, 0, 1, 21), 'C', [('Name', (1, 8, 1, 14), 'object', ('Load',))], [], [('Pass', (1, 17, 1, 21))], [], [])], []), +('Module', [('ClassDef', (1, 0, 1, 19), 'C', [('Name', (1, 8, 1, 9), 'A', ('Load',)), ('Name', (1, 11, 1, 12), 'B', ('Load',))], [], [('Pass', (1, 15, 1, 19))], [], [])], []), +('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 16), ('Constant', (1, 15, 1, 16), 1, None))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 14), None)], [], None, None, [])], []), +('Module', [('Delete', (1, 0, 1, 5), [('Name', (1, 4, 1, 5), 'v', ('Del',))])], []), +('Module', [('Assign', (1, 0, 1, 5), [('Name', (1, 0, 1, 1), 'v', ('Store',))], ('Constant', (1, 4, 1, 5), 1, None), None)], []), +('Module', [('Assign', (1, 0, 1, 7), [('Tuple', (1, 0, 1, 3), [('Name', (1, 0, 1, 1), 'a', ('Store',)), ('Name', (1, 2, 1, 3), 'b', ('Store',))], ('Store',))], ('Name', (1, 6, 1, 7), 'c', ('Load',)), None)], []), +('Module', [('Assign', (1, 0, 1, 9), [('Tuple', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), +('Module', [('Assign', (1, 0, 1, 9), [('List', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), +('Module', [('Assign', (1, 0, 1, 8), [('Subscript', (1, 0, 1, 4), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Store',))], ('Name', (1, 7, 1, 8), 'c', ('Load',)), None)], []), +('Module', [('AnnAssign', (1, 0, 1, 13), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 13), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 12), [('Starred', (1, 9, 1, 12), ('Name', (1, 10, 1, 12), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), +('Module', [('AnnAssign', (1, 0, 1, 18), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 18), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 17), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 17), ('Name', (1, 15, 1, 17), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), +('Module', [('AnnAssign', (1, 0, 1, 31), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 31), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 
1, 30), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 30), ('Subscript', (1, 15, 1, 30), ('Name', (1, 15, 1, 20), 'tuple', ('Load',)), ('Tuple', (1, 21, 1, 29), [('Name', (1, 21, 1, 24), 'str', ('Load',)), ('Constant', (1, 26, 1, 29), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Add',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Sub',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mult',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('MatMult',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Div',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mod',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Pow',), ('Constant', (1, 6, 1, 7), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('LShift',), ('Constant', (1, 6, 1, 7), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('RShift',), ('Constant', (1, 6, 1, 7), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitOr',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitXor',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitAnd',), ('Constant', (1, 5, 1, 6), 1, None))], []), +('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('FloorDiv',), ('Constant', (1, 6, 1, 7), 1, None))], []), +('Module', [('For', (1, 0, 1, 15), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (1, 11, 1, 15))], [], None)], []), +('Module', [('For', (1, 0, 4, 6), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))], None)], []), +('Module', [('While', (1, 0, 1, 12), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (1, 8, 1, 12))], [])], []), +('Module', [('While', (1, 0, 4, 6), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []), +('Module', [('If', (1, 0, 1, 9), ('Name', (1, 3, 1, 4), 'v', ('Load',)), [('Pass', (1, 5, 1, 9))], [])], []), +('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 4, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [])])], []), +('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []), +('Module', [('If', (1, 0, 6, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 6, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('Pass', (6, 2, 6, 6))])])], []), +('Module', [('If', (1, 0, 10, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 10, 6), ('Name', (3, 5, 3, 6), 
'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('If', (5, 0, 10, 6), ('Name', (5, 5, 5, 6), 'b', ('Load',)), [('Pass', (6, 2, 6, 6))], [('If', (7, 0, 10, 6), ('Name', (7, 5, 7, 6), 'b', ('Load',)), [('Pass', (8, 2, 8, 6))], [('Pass', (10, 2, 10, 6))])])])])], []), +('Module', [('With', (1, 0, 1, 12), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None)], [('Pass', (1, 8, 1, 12))], None)], []), +('Module', [('With', (1, 0, 1, 15), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None), ('withitem', ('Name', (1, 8, 1, 9), 'y', ('Load',)), None)], [('Pass', (1, 11, 1, 15))], None)], []), +('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',)))], [('Pass', (1, 13, 1, 17))], None)], []), +('Module', [('With', (1, 0, 1, 25), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',))), ('withitem', ('Name', (1, 13, 1, 14), 'z', ('Load',)), ('Name', (1, 18, 1, 19), 'q', ('Store',)))], [('Pass', (1, 21, 1, 25))], None)], []), +('Module', [('With', (1, 0, 1, 19), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), ('Name', (1, 11, 1, 12), 'y', ('Store',)))], [('Pass', (1, 15, 1, 19))], None)], []), +('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), None), ('withitem', ('Name', (1, 9, 1, 10), 'y', ('Load',)), None)], [('Pass', (1, 13, 1, 17))], None)], []), +('Module', [('Raise', (1, 0, 1, 5), None, None)], []), +('Module', [('Raise', (1, 0, 1, 25), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), None)], []), +('Module', [('Raise', (1, 0, 1, 15), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), None)], []), +('Module', [('Raise', (1, 0, 1, 35), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), ('Constant', (1, 31, 1, 35), None, None))], []), +('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), +('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []), +('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [], [], [('Pass', (4, 2, 4, 6))])], []), +('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), +('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []), +('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []), +('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []), +('Module', [('TryStar', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', 
(7, 2, 7, 6))])], []), +('Module', [('Assert', (1, 0, 1, 8), ('Name', (1, 7, 1, 8), 'v', ('Load',)), None)], []), +('Module', [('Assert', (1, 0, 1, 19), ('Name', (1, 7, 1, 8), 'v', ('Load',)), ('Constant', (1, 10, 1, 19), 'message', None))], []), +('Module', [('Import', (1, 0, 1, 10), [('alias', (1, 7, 1, 10), 'sys', None)])], []), +('Module', [('Import', (1, 0, 1, 17), [('alias', (1, 7, 1, 17), 'foo', 'bar')])], []), +('Module', [('ImportFrom', (1, 0, 1, 22), 'sys', [('alias', (1, 16, 1, 22), 'x', 'y')], 0)], []), +('Module', [('ImportFrom', (1, 0, 1, 17), 'sys', [('alias', (1, 16, 1, 17), 'v', None)], 0)], []), +('Module', [('Global', (1, 0, 1, 8), ['v'])], []), +('Module', [('Expr', (1, 0, 1, 1), ('Constant', (1, 0, 1, 1), 1, None))], []), +('Module', [('Pass', (1, 0, 1, 4))], []), +('Module', [('For', (1, 0, 1, 16), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Break', (1, 11, 1, 16))], [], None)], []), +('Module', [('For', (1, 0, 1, 19), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Continue', (1, 11, 1, 19))], [], None)], []), +('Module', [('For', (1, 0, 1, 18), ('Tuple', (1, 4, 1, 7), [('Name', (1, 4, 1, 5), 'a', ('Store',)), ('Name', (1, 6, 1, 7), 'b', ('Store',))], ('Store',)), ('Name', (1, 11, 1, 12), 'c', ('Load',)), [('Pass', (1, 14, 1, 18))], [], None)], []), +('Module', [('For', (1, 0, 1, 20), ('Tuple', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), +('Module', [('For', (1, 0, 1, 20), ('List', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), +('Module', [('Expr', (1, 0, 11, 5), ('GeneratorExp', (1, 0, 11, 5), ('Tuple', (2, 4, 6, 5), [('Name', (3, 4, 3, 6), 'Aa', ('Load',)), ('Name', (5, 7, 5, 9), 'Bb', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (8, 4, 10, 6), [('Name', (8, 4, 8, 6), 'Aa', ('Store',)), ('Name', (10, 4, 10, 6), 'Bb', ('Store',))], ('Store',)), ('Name', (10, 10, 10, 12), 'Cc', ('Load',)), [], 0)]))], []), +('Module', [('Expr', (1, 0, 1, 34), ('DictComp', (1, 0, 1, 34), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Name', (1, 11, 1, 12), 'w', ('Store',)), ('Name', (1, 16, 1, 17), 'x', ('Load',)), [], 0), ('comprehension', ('Name', (1, 22, 1, 23), 'm', ('Store',)), ('Name', (1, 27, 1, 28), 'p', ('Load',)), [('Name', (1, 32, 1, 33), 'g', ('Load',))], 0)]))], []), +('Module', [('Expr', (1, 0, 1, 20), ('DictComp', (1, 0, 1, 20), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'v', ('Store',)), ('Name', (1, 13, 1, 14), 'w', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'x', ('Load',)), [], 0)]))], []), +('Module', [('Expr', (1, 0, 1, 19), ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 12, 1, 13), 'x', ('Load',)), [('Name', (1, 17, 1, 18), 'g', ('Load',))], 0)]))], []), +('Module', [('Expr', (1, 0, 1, 16), ('SetComp', (1, 0, 1, 16), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Tuple', (1, 7, 1, 10), [('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 9, 1, 10), 'm', ('Store',))], ('Store',)), ('Name', (1, 14, 1, 15), 
'x', ('Load',)), [], 0)]))], []), +('Module', [('AsyncFunctionDef', (1, 0, 3, 18), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 17), ('Constant', (2, 1, 2, 17), 'async function', None)), ('Expr', (3, 1, 3, 18), ('Await', (3, 1, 3, 18), ('Call', (3, 7, 3, 18), ('Name', (3, 7, 3, 16), 'something', ('Load',)), [], [])))], [], None, None, [])], []), +('Module', [('AsyncFunctionDef', (1, 0, 3, 8), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncFor', (2, 1, 3, 8), ('Name', (2, 11, 2, 12), 'e', ('Store',)), ('Name', (2, 16, 2, 17), 'i', ('Load',)), [('Expr', (2, 19, 2, 20), ('Constant', (2, 19, 2, 20), 1, None))], [('Expr', (3, 7, 3, 8), ('Constant', (3, 7, 3, 8), 2, None))], None)], [], None, None, [])], []), +('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncWith', (2, 1, 2, 21), [('withitem', ('Name', (2, 12, 2, 13), 'a', ('Load',)), ('Name', (2, 17, 2, 18), 'b', ('Store',)))], [('Expr', (2, 20, 2, 21), ('Constant', (2, 20, 2, 21), 1, None))], None)], [], None, None, [])], []), +('Module', [('Expr', (1, 0, 1, 14), ('Dict', (1, 0, 1, 14), [None, ('Constant', (1, 10, 1, 11), 2, None)], [('Dict', (1, 3, 1, 8), [('Constant', (1, 4, 1, 5), 1, None)], [('Constant', (1, 6, 1, 7), 2, None)]), ('Constant', (1, 12, 1, 13), 3, None)]))], []), +('Module', [('Expr', (1, 0, 1, 12), ('Set', (1, 0, 1, 12), [('Starred', (1, 1, 1, 8), ('Set', (1, 2, 1, 8), [('Constant', (1, 3, 1, 4), 1, None), ('Constant', (1, 6, 1, 7), 2, None)]), ('Load',)), ('Constant', (1, 10, 1, 11), 3, None)]))], []), +('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 16), ('Yield', (1, 9, 1, 16), ('Constant', (1, 15, 1, 16), 1, None)))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 22), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 22), ('YieldFrom', (1, 9, 1, 22), ('List', (1, 20, 1, 22), [], ('Load',))))], [], None, None, [])], []), +('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 21), ('ListComp', (2, 1, 2, 21), ('Name', (2, 2, 2, 3), 'i', ('Load',)), [('comprehension', ('Name', (2, 14, 2, 15), 'b', ('Store',)), ('Name', (2, 19, 2, 20), 'c', ('Load',)), [], 1)]))], [], None, None, [])], []), +('Module', [('FunctionDef', (4, 0, 4, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), +('Module', [('AsyncFunctionDef', (4, 0, 4, 19), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 15, 4, 19))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), +('Module', [('ClassDef', (4, 0, 4, 13), 'C', [], [], [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], [])], []), +('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), 
[('Pass', (2, 9, 2, 13))], [('Call', (1, 1, 1, 19), ('Name', (1, 1, 1, 5), 'deco', ('Load',)), [('GeneratorExp', (1, 5, 1, 19), ('Name', (1, 6, 1, 7), 'a', ('Load',)), [('comprehension', ('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 17, 1, 18), 'b', ('Load',)), [], 0)])], [])], None, None, [])], []), +('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Attribute', (1, 1, 1, 6), ('Attribute', (1, 1, 1, 4), ('Name', (1, 1, 1, 2), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',))], None, None, [])], []), +('Module', [('Expr', (1, 0, 1, 8), ('NamedExpr', (1, 1, 1, 7), ('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Constant', (1, 6, 1, 7), 1, None)))], []), +('Module', [('If', (1, 0, 1, 19), ('NamedExpr', (1, 3, 1, 13), ('Name', (1, 3, 1, 4), 'a', ('Store',)), ('Call', (1, 8, 1, 13), ('Name', (1, 8, 1, 11), 'foo', ('Load',)), [], [])), [('Pass', (1, 15, 1, 19))], [])], []), +('Module', [('While', (1, 0, 1, 22), ('NamedExpr', (1, 6, 1, 16), ('Name', (1, 6, 1, 7), 'a', ('Store',)), ('Call', (1, 11, 1, 16), ('Name', (1, 11, 1, 14), 'foo', ('Load',)), [], [])), [('Pass', (1, 18, 1, 22))], [])], []), +('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None), ('arg', (1, 15, 1, 16), 'd', None, None), ('arg', (1, 18, 1, 19), 'e', None, None)], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], None, []), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 39), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], ('arg', (1, 26, 1, 32), 'kwargs', None, None), []), [('Pass', (1, 35, 1, 39))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 20), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None)]), [('Pass', (1, 16, 1, 20))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None), ('arg', (1, 19, 1, 20), 'c', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None), ('Constant', (1, 21, 1, 22), 4, None)]), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 28, 1, 32))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 30), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, 
None)], [None], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 26, 1, 30))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 42), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], ('arg', (1, 29, 1, 35), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 38, 1, 42))], [], None, None, [])], []), +('Module', [('FunctionDef', (1, 0, 1, 40), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], ('arg', (1, 27, 1, 33), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 36, 1, 40))], [], None, None, [])], []), +('Module', [('TypeAlias', (1, 0, 1, 12), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [], ('Name', (1, 9, 1, 12), 'int', ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 15), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None)], ('Name', (1, 12, 1, 15), 'int', ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 32), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None), ('TypeVarTuple', (1, 10, 1, 13), 'Ts', None), ('ParamSpec', (1, 15, 1, 18), 'P', None)], ('Tuple', (1, 22, 1, 32), [('Name', (1, 23, 1, 24), 'T', ('Load',)), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Name', (1, 30, 1, 31), 'P', ('Load',))], ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 37), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 13), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), None), ('TypeVarTuple', (1, 15, 1, 18), 'Ts', None), ('ParamSpec', (1, 20, 1, 23), 'P', None)], ('Tuple', (1, 27, 1, 37), [('Name', (1, 28, 1, 29), 'T', ('Load',)), ('Name', (1, 31, 1, 33), 'Ts', ('Load',)), ('Name', (1, 35, 1, 36), 'P', ('Load',))], ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 44), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 20), 'T', ('Tuple', (1, 10, 1, 20), [('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Name', (1, 16, 1, 19), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 22, 1, 25), 'Ts', None), ('ParamSpec', (1, 27, 1, 30), 'P', None)], ('Tuple', (1, 34, 1, 44), [('Name', (1, 35, 1, 36), 'T', ('Load',)), ('Name', (1, 38, 1, 40), 'Ts', ('Load',)), ('Name', (1, 42, 1, 43), 'P', ('Load',))], ('Load',)))], []), +('Module', [('TypeAlias', (1, 0, 1, 48), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 17), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Constant', (1, 16, 1, 17), 1, None)), ('TypeVarTuple', (1, 19, 1, 26), 'Ts', ('Constant', (1, 25, 1, 26), 2, None)), ('ParamSpec', (1, 28, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))], ('Tuple', (1, 38, 1, 48), [('Name', (1, 39, 1, 40), 'T', ('Load',)), ('Name', (1, 42, 1, 44), 'Ts', ('Load',)), ('Name', (1, 46, 1, 47), 'P', ('Load',))], ('Load',)))], []), +('Module', [('ClassDef', (1, 0, 1, 16), 'X', [], [], [('Pass', (1, 12, 1, 16))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None)])], []), +('Module', [('ClassDef', (1, 0, 1, 26), 'X', [], [], [('Pass', (1, 22, 1, 26))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None), ('TypeVarTuple', (1, 11, 1, 14), 'Ts', None), ('ParamSpec', (1, 16, 1, 19), 'P', None)])], []), +('Module', [('ClassDef', (1, 0, 1, 31), 
'X', [], [], [('Pass', (1, 27, 1, 31))], [], [('TypeVar', (1, 8, 1, 14), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), None), ('TypeVarTuple', (1, 16, 1, 19), 'Ts', None), ('ParamSpec', (1, 21, 1, 24), 'P', None)])], []), +('Module', [('ClassDef', (1, 0, 1, 38), 'X', [], [], [('Pass', (1, 34, 1, 38))], [], [('TypeVar', (1, 8, 1, 21), 'T', ('Tuple', (1, 11, 1, 21), [('Name', (1, 12, 1, 15), 'int', ('Load',)), ('Name', (1, 17, 1, 20), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 23, 1, 26), 'Ts', None), ('ParamSpec', (1, 28, 1, 31), 'P', None)])], []), +('Module', [('ClassDef', (1, 0, 1, 43), 'X', [], [], [('Pass', (1, 39, 1, 43))], [], [('TypeVar', (1, 8, 1, 18), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Constant', (1, 17, 1, 18), 1, None)), ('TypeVarTuple', (1, 20, 1, 27), 'Ts', ('Constant', (1, 26, 1, 27), 2, None)), ('ParamSpec', (1, 29, 1, 36), 'P', ('Constant', (1, 35, 1, 36), 3, None))])], []), +('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 12, 1, 16))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None)])], []), +('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None), ('TypeVarTuple', (1, 9, 1, 12), 'Ts', None), ('ParamSpec', (1, 14, 1, 17), 'P', None)])], []), +('Module', [('FunctionDef', (1, 0, 1, 31), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 27, 1, 31))], [], None, None, [('TypeVar', (1, 6, 1, 12), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), None), ('TypeVarTuple', (1, 14, 1, 17), 'Ts', None), ('ParamSpec', (1, 19, 1, 22), 'P', None)])], []), +('Module', [('FunctionDef', (1, 0, 1, 38), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 34, 1, 38))], [], None, None, [('TypeVar', (1, 6, 1, 19), 'T', ('Tuple', (1, 9, 1, 19), [('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Name', (1, 15, 1, 18), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 21, 1, 24), 'Ts', None), ('ParamSpec', (1, 26, 1, 29), 'P', None)])], []), +('Module', [('FunctionDef', (1, 0, 1, 43), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 39, 1, 43))], [], None, None, [('TypeVar', (1, 6, 1, 16), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Constant', (1, 15, 1, 16), 1, None)), ('TypeVarTuple', (1, 18, 1, 25), 'Ts', ('Constant', (1, 24, 1, 25), 2, None)), ('ParamSpec', (1, 27, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))])], []), +('Module', [('Match', (1, 0, 3, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))])])], []), +('Module', [('Match', (1, 0, 5, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))]), ('match_case', ('MatchAs', (4, 6, 4, 7), None, None), None, [('Pass', (5, 2, 5, 6))])])], []), +] +single_results = [ +('Interactive', [('Expr', (1, 0, 1, 3), ('BinOp', (1, 0, 1, 3), ('Constant', (1, 0, 1, 1), 1, None), ('Add',), ('Constant', (1, 2, 1, 3), 2, None)))]), +] +eval_results = [ +('Expression', ('Constant', (1, 0, 1, 4), None, None)), +('Expression', ('Constant', (1, 0, 1, 4), True, None)), +('Expression', ('Constant', (1, 0, 1, 5), False, None)), +('Expression', ('BoolOp', (1, 0, 1, 7), ('And',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 6, 1, 7), 'b', ('Load',))])), 
+('Expression', ('BoolOp', (1, 0, 1, 6), ('Or',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Add',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Sub',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Div',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('MatMult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('FloorDiv',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Pow',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mod',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('RShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('LShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitXor',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitOr',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitAnd',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), +('Expression', ('UnaryOp', (1, 0, 1, 5), ('Not',), ('Name', (1, 4, 1, 5), 'v', ('Load',)))), +('Expression', ('UnaryOp', (1, 0, 1, 2), ('UAdd',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), +('Expression', ('UnaryOp', (1, 0, 1, 2), ('USub',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), +('Expression', ('UnaryOp', (1, 0, 1, 2), ('Invert',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), +('Expression', ('Lambda', (1, 0, 1, 11), ('arguments', [], [], None, [], [], None, []), ('Constant', (1, 7, 1, 11), None, None))), +('Expression', ('Dict', (1, 0, 1, 7), [('Constant', (1, 2, 1, 3), 1, None)], [('Constant', (1, 4, 1, 5), 2, None)])), +('Expression', ('Dict', (1, 0, 1, 2), [], [])), +('Expression', ('Set', (1, 0, 1, 7), [('Constant', (1, 1, 1, 5), None, None)])), +('Expression', ('Dict', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None)], [('Constant', (4, 10, 4, 11), 2, None)])), +('Expression', ('List', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)], ('Load',))), +('Expression', ('Tuple', (1, 0, 4, 6), [('Constant', (2, 6, 2, 7), 1, None)], ('Load',))), +('Expression', ('Set', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)])), +('Expression', ('ListComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), +('Expression', ('GeneratorExp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', 
('Load',))], 0)])), +('Expression', ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), +('Expression', ('DictComp', (1, 0, 1, 25), ('Name', (1, 1, 1, 2), 'k', ('Load',)), ('Name', (1, 4, 1, 5), 'v', ('Load',)), [('comprehension', ('Tuple', (1, 10, 1, 14), [('Name', (1, 10, 1, 11), 'k', ('Store',)), ('Name', (1, 13, 1, 14), 'v', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [('Name', (1, 23, 1, 24), 'd', ('Load',))], 0)])), +('Expression', ('ListComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), +('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('SetComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), +('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('GeneratorExp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), +('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('GeneratorExp', (1, 0, 1, 
22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), +('Expression', ('Compare', (1, 0, 1, 9), ('Constant', (1, 0, 1, 1), 1, None), [('Lt',), ('Lt',)], [('Constant', (1, 4, 1, 5), 2, None), ('Constant', (1, 8, 1, 9), 3, None)])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Eq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('LtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('GtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotEq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Is',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('IsNot',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('In',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), +('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotIn',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])), +('Expression', ('Call', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [], [])), +('Expression', ('Call', (1, 0, 1, 17), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Constant', (1, 2, 1, 3), 1, None), ('Constant', (1, 4, 1, 5), 2, None), ('Starred', (1, 10, 1, 12), ('Name', (1, 11, 1, 12), 'd', ('Load',)), ('Load',))], [('keyword', (1, 6, 1, 9), 'c', ('Constant', (1, 8, 1, 9), 3, None)), ('keyword', (1, 13, 1, 16), None, ('Name', (1, 15, 1, 16), 'e', ('Load',)))])), +('Expression', ('Call', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Starred', (1, 2, 1, 9), ('List', (1, 3, 1, 9), [('Constant', (1, 4, 1, 5), 0, None), ('Constant', (1, 7, 1, 8), 1, None)], ('Load',)), ('Load',))], [])), +('Expression', ('Call', (1, 0, 1, 15), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('GeneratorExp', (1, 1, 1, 15), ('Name', (1, 2, 1, 3), 'a', ('Load',)), [('comprehension', ('Name', (1, 8, 1, 9), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Load',)), [], 0)])], [])), +('Expression', ('Constant', (1, 0, 1, 2), 10, None)), +('Expression', ('Constant', (1, 0, 1, 2), 1j, None)), +('Expression', ('Constant', (1, 0, 1, 8), 'string', None)), +('Expression', ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',))), +('Expression', ('Subscript', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Slice', (1, 2, 1, 5), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Name', (1, 4, 1, 5), 'c', ('Load',)), None), ('Load',))), +('Expression', ('Name', (1, 0, 1, 1), 'v', ('Load',))), +('Expression', ('List', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), +('Expression', ('List', (1, 0, 1, 2), [], ('Load',))), +('Expression', ('Tuple', (1, 0, 1, 5), [('Constant', (1, 0, 1, 1), 1, None), ('Constant', (1, 2, 1, 3), 2, None), ('Constant', (1, 4, 1, 5), 3, None)], ('Load',))), +('Expression', ('Tuple', (1, 0, 
1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), +('Expression', ('Tuple', (1, 0, 1, 2), [], ('Load',))), +('Expression', ('Call', (1, 0, 1, 17), ('Attribute', (1, 0, 1, 7), ('Attribute', (1, 0, 1, 5), ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8, 1, 16), ('Attribute', (1, 8, 1, 11), ('Name', (1, 8, 1, 9), 'a', ('Load',)), 'b', ('Load',)), ('Slice', (1, 12, 1, 15), ('Constant', (1, 12, 1, 13), 1, None), ('Constant', (1, 14, 1, 15), 2, None), None), ('Load',))], [])), +('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), ('Constant', (1, 4, 1, 5), 1, None), None, None), ('Load',))), +('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), None, ('Constant', (1, 5, 1, 6), 1, None), None), ('Load',))), +('Expression', ('Subscript', (1, 0, 1, 8), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 7), None, None, ('Constant', (1, 6, 1, 7), 1, None)), ('Load',))), +('Expression', ('Subscript', (1, 0, 1, 10), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 9), ('Constant', (1, 4, 1, 5), 1, None), ('Constant', (1, 6, 1, 7), 1, None), ('Constant', (1, 8, 1, 9), 1, None)), ('Load',))), +('Expression', ('IfExp', (1, 0, 1, 21), ('Name', (1, 9, 1, 10), 'x', ('Load',)), ('Call', (1, 0, 1, 5), ('Name', (1, 0, 1, 3), 'foo', ('Load',)), [], []), ('Call', (1, 16, 1, 21), ('Name', (1, 16, 1, 19), 'bar', ('Load',)), [], []))), +('Expression', ('JoinedStr', (1, 0, 1, 6), [('FormattedValue', (1, 2, 1, 5), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, None)])), +('Expression', ('JoinedStr', (1, 0, 1, 10), [('FormattedValue', (1, 2, 1, 9), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, ('JoinedStr', (1, 4, 1, 8), [('Constant', (1, 5, 1, 8), '.2f', None)]))])), +('Expression', ('JoinedStr', (1, 0, 1, 8), [('FormattedValue', (1, 2, 1, 7), ('Name', (1, 3, 1, 4), 'a', ('Load',)), 114, None)])), +('Expression', ('JoinedStr', (1, 0, 1, 11), [('Constant', (1, 2, 1, 6), 'foo(', None), ('FormattedValue', (1, 6, 1, 9), ('Name', (1, 7, 1, 8), 'a', ('Load',)), -1, None), ('Constant', (1, 9, 1, 10), ')', None)])), +] +main() diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast/test_ast.py similarity index 68% rename from Lib/test/test_ast.py rename to Lib/test/test_ast/test_ast.py index 55725ec36fd3a7..0a3edef4678546 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast/test_ast.py @@ -19,389 +19,16 @@ from test import support from test.support import os_helper, script_helper from test.support.ast_helper import ASTTestMixin +from test.test_ast.utils import to_tuple +from test.test_ast.snippets import ( + eval_tests, eval_results, exec_tests, exec_results, single_tests, single_results +) -def to_tuple(t): - if t is None or isinstance(t, (str, int, complex, float, bytes)) or t is Ellipsis: - return t - elif isinstance(t, list): - return [to_tuple(e) for e in t] - result = [t.__class__.__name__] - if hasattr(t, 'lineno') and hasattr(t, 'col_offset'): - result.append((t.lineno, t.col_offset)) - if hasattr(t, 'end_lineno') and hasattr(t, 'end_col_offset'): - result[-1] += (t.end_lineno, t.end_col_offset) - if t._fields is None: - return tuple(result) - for f in t._fields: - 
result.append(to_tuple(getattr(t, f))) - return tuple(result) STDLIB = os.path.dirname(ast.__file__) STDLIB_FILES = [fn for fn in os.listdir(STDLIB) if fn.endswith(".py")] STDLIB_FILES.extend(["test/test_grammar.py", "test/test_unpack_ex.py"]) -# These tests are compiled through "exec" -# There should be at least one test per statement -exec_tests = [ - # Module docstring - "'module docstring'", - # FunctionDef - "def f(): pass", - # FunctionDef with docstring - "def f(): 'function docstring'", - # FunctionDef with arg - "def f(a): pass", - # FunctionDef with arg and default value - "def f(a=0): pass", - # FunctionDef with varargs - "def f(*args): pass", - # FunctionDef with varargs as TypeVarTuple - "def f(*args: *Ts): pass", - # FunctionDef with varargs as unpacked Tuple - "def f(*args: *tuple[int, ...]): pass", - # FunctionDef with varargs as unpacked Tuple *and* TypeVarTuple - "def f(*args: *tuple[int, *Ts]): pass", - # FunctionDef with kwargs - "def f(**kwargs): pass", - # FunctionDef with all kind of args and docstring - "def f(a, b=1, c=None, d=[], e={}, *args, f=42, **kwargs): 'doc for f()'", - # FunctionDef with type annotation on return involving unpacking - "def f() -> tuple[*Ts]: pass", - "def f() -> tuple[int, *Ts]: pass", - "def f() -> tuple[int, *tuple[int, ...]]: pass", - # ClassDef - "class C:pass", - # ClassDef with docstring - "class C: 'docstring for class C'", - # ClassDef, new style class - "class C(object): pass", - # Classdef with multiple bases - "class C(A, B): pass", - # Return - "def f():return 1", - "def f():return", - # Delete - "del v", - # Assign - "v = 1", - "a,b = c", - "(a,b) = c", - "[a,b] = c", - "a[b] = c", - # AnnAssign with unpacked types - "x: tuple[*Ts]", - "x: tuple[int, *Ts]", - "x: tuple[int, *tuple[str, ...]]", - # AugAssign - "v += 1", - "v -= 1", - "v *= 1", - "v @= 1", - "v /= 1", - "v %= 1", - "v **= 1", - "v <<= 1", - "v >>= 1", - "v |= 1", - "v ^= 1", - "v &= 1", - "v //= 1", - # For - "for v in v:pass", - # For-Else - "for v in v:\n pass\nelse:\n pass", - # While - "while v:pass", - # While-Else - "while v:\n pass\nelse:\n pass", - # If-Elif-Else - "if v:pass", - "if a:\n pass\nelif b:\n pass", - "if a:\n pass\nelse:\n pass", - "if a:\n pass\nelif b:\n pass\nelse:\n pass", - "if a:\n pass\nelif b:\n pass\nelif b:\n pass\nelif b:\n pass\nelse:\n pass", - # With - "with x: pass", - "with x, y: pass", - "with x as y: pass", - "with x as y, z as q: pass", - "with (x as y): pass", - "with (x, y): pass", - # Raise - "raise", - "raise Exception('string')", - "raise Exception", - "raise Exception('string') from None", - # TryExcept - "try:\n pass\nexcept Exception:\n pass", - "try:\n pass\nexcept Exception as exc:\n pass", - # TryFinally - "try:\n pass\nfinally:\n pass", - # TryStarExcept - "try:\n pass\nexcept* Exception:\n pass", - "try:\n pass\nexcept* Exception as exc:\n pass", - # TryExceptFinallyElse - "try:\n pass\nexcept Exception:\n pass\nelse: pass\nfinally:\n pass", - "try:\n pass\nexcept Exception as exc:\n pass\nelse: pass\nfinally:\n pass", - "try:\n pass\nexcept* Exception as exc:\n pass\nelse: pass\nfinally:\n pass", - # Assert - "assert v", - # Assert with message - "assert v, 'message'", - # Import - "import sys", - "import foo as bar", - # ImportFrom - "from sys import x as y", - "from sys import v", - # Global - "global v", - # Expr - "1", - # Pass, - "pass", - # Break - "for v in v:break", - # Continue - "for v in v:continue", - # for statements with naked tuples (see http://bugs.python.org/issue6704) - "for a,b in c: pass", 
- "for (a,b) in c: pass", - "for [a,b] in c: pass", - # Multiline generator expression (test for .lineno & .col_offset) - """( - ( - Aa - , - Bb - ) - for - Aa - , - Bb in Cc - )""", - # dictcomp - "{a : b for w in x for m in p if g}", - # dictcomp with naked tuple - "{a : b for v,w in x}", - # setcomp - "{r for l in x if g}", - # setcomp with naked tuple - "{r for l,m in x}", - # AsyncFunctionDef - "async def f():\n 'async function'\n await something()", - # AsyncFor - "async def f():\n async for e in i: 1\n else: 2", - # AsyncWith - "async def f():\n async with a as b: 1", - # PEP 448: Additional Unpacking Generalizations - "{**{1:2}, 2:3}", - "{*{1, 2}, 3}", - # Function with yield (from) - "def f(): yield 1", - "def f(): yield from []", - # Asynchronous comprehensions - "async def f():\n [i async for b in c]", - # Decorated FunctionDef - "@deco1\n@deco2()\n@deco3(1)\ndef f(): pass", - # Decorated AsyncFunctionDef - "@deco1\n@deco2()\n@deco3(1)\nasync def f(): pass", - # Decorated ClassDef - "@deco1\n@deco2()\n@deco3(1)\nclass C: pass", - # Decorator with generator argument - "@deco(a for a in b)\ndef f(): pass", - # Decorator with attribute - "@a.b.c\ndef f(): pass", - # Simple assignment expression - "(a := 1)", - # Assignment expression in if statement - "if a := foo(): pass", - # Assignment expression in while - "while a := foo(): pass", - # Positional-only arguments - "def f(a, /,): pass", - "def f(a, /, c, d, e): pass", - "def f(a, /, c, *, d, e): pass", - "def f(a, /, c, *, d, e, **kwargs): pass", - # Positional-only arguments with defaults - "def f(a=1, /,): pass", - "def f(a=1, /, b=2, c=4): pass", - "def f(a=1, /, b=2, *, c=4): pass", - "def f(a=1, /, b=2, *, c): pass", - "def f(a=1, /, b=2, *, c=4, **kwargs): pass", - "def f(a=1, /, b=2, *, c, **kwargs): pass", - # Type aliases - "type X = int", - "type X[T] = int", - "type X[T, *Ts, **P] = (T, Ts, P)", - "type X[T: int, *Ts, **P] = (T, Ts, P)", - "type X[T: (int, str), *Ts, **P] = (T, Ts, P)", - "type X[T: int = 1, *Ts = 2, **P =3] = (T, Ts, P)", - # Generic classes - "class X[T]: pass", - "class X[T, *Ts, **P]: pass", - "class X[T: int, *Ts, **P]: pass", - "class X[T: (int, str), *Ts, **P]: pass", - "class X[T: int = 1, *Ts = 2, **P = 3]: pass", - # Generic functions - "def f[T](): pass", - "def f[T, *Ts, **P](): pass", - "def f[T: int, *Ts, **P](): pass", - "def f[T: (int, str), *Ts, **P](): pass", - "def f[T: int = 1, *Ts = 2, **P = 3](): pass", - # Match - "match x:\n\tcase 1:\n\t\tpass", - # Match with _ - "match x:\n\tcase 1:\n\t\tpass\n\tcase _:\n\t\tpass", -] - -# These are compiled through "single" -# because of overlap with "eval", it just tests what -# can't be tested with "eval" -single_tests = [ - "1+2" -] - -# These are compiled through "eval" -# It should test all expressions -eval_tests = [ - # Constant(value=None) - "None", - # True - "True", - # False - "False", - # BoolOp - "a and b", - "a or b", - # BinOp - "a + b", - "a - b", - "a * b", - "a / b", - "a @ b", - "a // b", - "a ** b", - "a % b", - "a >> b", - "a << b", - "a ^ b", - "a | b", - "a & b", - # UnaryOp - "not v", - "+v", - "-v", - "~v", - # Lambda - "lambda:None", - # Dict - "{ 1:2 }", - # Empty dict - "{}", - # Set - "{None,}", - # Multiline dict (test for .lineno & .col_offset) - """{ - 1 - : - 2 - }""", - # Multiline list - """[ - 1 - , - 1 - ]""", - # Multiline tuple - """( - 1 - , - )""", - # Multiline set - """{ - 1 - , - 1 - }""", - # ListComp - "[a for b in c if d]", - # GeneratorExp - "(a for b in c if d)", - # SetComp - "{a for b in c 
if d}", - # DictComp - "{k: v for k, v in c if d}", - # Comprehensions with multiple for targets - "[(a,b) for a,b in c]", - "[(a,b) for (a,b) in c]", - "[(a,b) for [a,b] in c]", - "{(a,b) for a,b in c}", - "{(a,b) for (a,b) in c}", - "{(a,b) for [a,b] in c}", - "((a,b) for a,b in c)", - "((a,b) for (a,b) in c)", - "((a,b) for [a,b] in c)", - # Async comprehensions - async comprehensions can't work outside an asynchronous function - # - # Yield - yield expressions can't work outside a function - # - # Compare - "1 < 2 < 3", - "a == b", - "a <= b", - "a >= b", - "a != b", - "a is b", - "a is not b", - "a in b", - "a not in b", - # Call without argument - "f()", - # Call - "f(1,2,c=3,*d,**e)", - # Call with multi-character starred - "f(*[0, 1])", - # Call with a generator argument - "f(a for a in b)", - # Constant(value=int()) - "10", - # Complex num - "1j", - # Constant(value=str()) - "'string'", - # Attribute - "a.b", - # Subscript - "a[b:c]", - # Name - "v", - # List - "[1,2,3]", - # Empty list - "[]", - # Tuple - "1,2,3", - # Tuple - "(1,2,3)", - # Empty tuple - "()", - # Combination - "a.b.c.d(a.b[1:2])", - # Slice - "[5][1:]", - "[5][:1]", - "[5][::1]", - "[5][1:1:1]", - # IfExp - "foo() if x else bar()", - # JoinedStr and FormattedValue - "f'{a}'", - "f'{a:.2f}'", - "f'{a!r}'", - "f'foo({a})'", -] - class AST_Tests(unittest.TestCase): maxDiff = None @@ -3408,238 +3035,3 @@ def test_cli_file_input(self): self.assertEqual(expected.splitlines(), res.out.decode("utf8").splitlines()) self.assertEqual(res.rc, 0) - - -def main(): - if __name__ != '__main__': - return - if sys.argv[1:] == ['-g']: - for statements, kind in ((exec_tests, "exec"), (single_tests, "single"), - (eval_tests, "eval")): - print(kind+"_results = [") - for statement in statements: - tree = ast.parse(statement, "?", kind) - print("%r," % (to_tuple(tree),)) - print("]") - print("main()") - raise SystemExit - unittest.main() - -#### EVERYTHING BELOW IS GENERATED BY python Lib/test/test_ast.py -g ##### -exec_results = [ -('Module', [('Expr', (1, 0, 1, 18), ('Constant', (1, 0, 1, 18), 'module docstring', None))], []), -('Module', [('FunctionDef', (1, 0, 1, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 9, 1, 13))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 29), ('Constant', (1, 9, 1, 29), 'function docstring', None))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, []), [('Pass', (1, 10, 1, 14))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 0, None)]), [('Pass', (1, 12, 1, 16))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 11), 'args', None, None), [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 23), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 16), 'args', ('Starred', (1, 13, 1, 16), ('Name', (1, 14, 1, 16), 'Ts', ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 19, 1, 23))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), 
('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Constant', (1, 25, 1, 28), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Starred', (1, 25, 1, 28), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 21), 'f', ('arguments', [], [], None, [], [], ('arg', (1, 8, 1, 14), 'kwargs', None, None), []), [('Pass', (1, 17, 1, 21))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 71), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None), ('arg', (1, 9, 1, 10), 'b', None, None), ('arg', (1, 14, 1, 15), 'c', None, None), ('arg', (1, 22, 1, 23), 'd', None, None), ('arg', (1, 28, 1, 29), 'e', None, None)], ('arg', (1, 35, 1, 39), 'args', None, None), [('arg', (1, 41, 1, 42), 'f', None, None)], [('Constant', (1, 43, 1, 45), 42, None)], ('arg', (1, 49, 1, 55), 'kwargs', None, None), [('Constant', (1, 11, 1, 12), 1, None), ('Constant', (1, 16, 1, 20), None, None), ('List', (1, 24, 1, 26), [], ('Load',)), ('Dict', (1, 30, 1, 32), [], [])]), [('Expr', (1, 58, 1, 71), ('Constant', (1, 58, 1, 71), 'doc for f()', None))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 27), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 23, 1, 27))], [], ('Subscript', (1, 11, 1, 21), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 20), [('Starred', (1, 17, 1, 20), ('Name', (1, 18, 1, 20), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 28, 1, 32))], [], ('Subscript', (1, 11, 1, 26), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 25), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 25), ('Name', (1, 23, 1, 25), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 45), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 41, 1, 45))], [], ('Subscript', (1, 11, 1, 39), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 38), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 38), ('Subscript', (1, 23, 1, 38), ('Name', (1, 23, 1, 28), 'tuple', ('Load',)), ('Tuple', (1, 29, 1, 37), [('Name', (1, 29, 1, 32), 'int', ('Load',)), ('Constant', (1, 34, 1, 37), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []), -('Module', [('ClassDef', (1, 0, 1, 12), 'C', [], [], [('Pass', (1, 8, 1, 12))], [], [])], []), -('Module', [('ClassDef', (1, 0, 1, 32), 'C', [], [], [('Expr', (1, 9, 1, 32), ('Constant', (1, 9, 1, 32), 'docstring for class C', None))], [], [])], []), -('Module', [('ClassDef', (1, 0, 1, 21), 'C', [('Name', (1, 8, 1, 14), 'object', ('Load',))], [], [('Pass', (1, 17, 1, 21))], [], [])], []), -('Module', [('ClassDef', (1, 0, 1, 19), 'C', [('Name', (1, 8, 1, 9), 'A', ('Load',)), ('Name', (1, 11, 1, 12), 'B', ('Load',))], [], [('Pass', (1, 15, 1, 19))], [], [])], []), -('Module', [('FunctionDef', 
(1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 16), ('Constant', (1, 15, 1, 16), 1, None))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 14), None)], [], None, None, [])], []), -('Module', [('Delete', (1, 0, 1, 5), [('Name', (1, 4, 1, 5), 'v', ('Del',))])], []), -('Module', [('Assign', (1, 0, 1, 5), [('Name', (1, 0, 1, 1), 'v', ('Store',))], ('Constant', (1, 4, 1, 5), 1, None), None)], []), -('Module', [('Assign', (1, 0, 1, 7), [('Tuple', (1, 0, 1, 3), [('Name', (1, 0, 1, 1), 'a', ('Store',)), ('Name', (1, 2, 1, 3), 'b', ('Store',))], ('Store',))], ('Name', (1, 6, 1, 7), 'c', ('Load',)), None)], []), -('Module', [('Assign', (1, 0, 1, 9), [('Tuple', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), -('Module', [('Assign', (1, 0, 1, 9), [('List', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []), -('Module', [('Assign', (1, 0, 1, 8), [('Subscript', (1, 0, 1, 4), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Store',))], ('Name', (1, 7, 1, 8), 'c', ('Load',)), None)], []), -('Module', [('AnnAssign', (1, 0, 1, 13), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 13), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 12), [('Starred', (1, 9, 1, 12), ('Name', (1, 10, 1, 12), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -('Module', [('AnnAssign', (1, 0, 1, 18), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 18), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 17), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 17), ('Name', (1, 15, 1, 17), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -('Module', [('AnnAssign', (1, 0, 1, 31), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 31), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 30), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 30), ('Subscript', (1, 15, 1, 30), ('Name', (1, 15, 1, 20), 'tuple', ('Load',)), ('Tuple', (1, 21, 1, 29), [('Name', (1, 21, 1, 24), 'str', ('Load',)), ('Constant', (1, 26, 1, 29), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Add',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Sub',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mult',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('MatMult',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Div',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mod',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Pow',), ('Constant', (1, 6, 1, 7), 1, None))], []), -('Module', [('AugAssign', 
(1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('LShift',), ('Constant', (1, 6, 1, 7), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('RShift',), ('Constant', (1, 6, 1, 7), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitOr',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitXor',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitAnd',), ('Constant', (1, 5, 1, 6), 1, None))], []), -('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('FloorDiv',), ('Constant', (1, 6, 1, 7), 1, None))], []), -('Module', [('For', (1, 0, 1, 15), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (1, 11, 1, 15))], [], None)], []), -('Module', [('For', (1, 0, 4, 6), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))], None)], []), -('Module', [('While', (1, 0, 1, 12), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (1, 8, 1, 12))], [])], []), -('Module', [('While', (1, 0, 4, 6), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []), -('Module', [('If', (1, 0, 1, 9), ('Name', (1, 3, 1, 4), 'v', ('Load',)), [('Pass', (1, 5, 1, 9))], [])], []), -('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 4, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [])])], []), -('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []), -('Module', [('If', (1, 0, 6, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 6, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('Pass', (6, 2, 6, 6))])])], []), -('Module', [('If', (1, 0, 10, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 10, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('If', (5, 0, 10, 6), ('Name', (5, 5, 5, 6), 'b', ('Load',)), [('Pass', (6, 2, 6, 6))], [('If', (7, 0, 10, 6), ('Name', (7, 5, 7, 6), 'b', ('Load',)), [('Pass', (8, 2, 8, 6))], [('Pass', (10, 2, 10, 6))])])])])], []), -('Module', [('With', (1, 0, 1, 12), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None)], [('Pass', (1, 8, 1, 12))], None)], []), -('Module', [('With', (1, 0, 1, 15), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None), ('withitem', ('Name', (1, 8, 1, 9), 'y', ('Load',)), None)], [('Pass', (1, 11, 1, 15))], None)], []), -('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',)))], [('Pass', (1, 13, 1, 17))], None)], []), -('Module', [('With', (1, 0, 1, 25), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',))), ('withitem', ('Name', (1, 13, 1, 14), 'z', ('Load',)), ('Name', (1, 18, 1, 19), 'q', ('Store',)))], [('Pass', (1, 21, 1, 25))], None)], []), -('Module', [('With', (1, 0, 1, 19), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), ('Name', (1, 11, 1, 12), 'y', ('Store',)))], [('Pass', (1, 15, 1, 19))], None)], []), -('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), None), 
('withitem', ('Name', (1, 9, 1, 10), 'y', ('Load',)), None)], [('Pass', (1, 13, 1, 17))], None)], []), -('Module', [('Raise', (1, 0, 1, 5), None, None)], []), -('Module', [('Raise', (1, 0, 1, 25), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), None)], []), -('Module', [('Raise', (1, 0, 1, 15), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), None)], []), -('Module', [('Raise', (1, 0, 1, 35), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), ('Constant', (1, 31, 1, 35), None, None))], []), -('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), -('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []), -('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [], [], [('Pass', (4, 2, 4, 6))])], []), -('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []), -('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []), -('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []), -('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []), -('Module', [('TryStar', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []), -('Module', [('Assert', (1, 0, 1, 8), ('Name', (1, 7, 1, 8), 'v', ('Load',)), None)], []), -('Module', [('Assert', (1, 0, 1, 19), ('Name', (1, 7, 1, 8), 'v', ('Load',)), ('Constant', (1, 10, 1, 19), 'message', None))], []), -('Module', [('Import', (1, 0, 1, 10), [('alias', (1, 7, 1, 10), 'sys', None)])], []), -('Module', [('Import', (1, 0, 1, 17), [('alias', (1, 7, 1, 17), 'foo', 'bar')])], []), -('Module', [('ImportFrom', (1, 0, 1, 22), 'sys', [('alias', (1, 16, 1, 22), 'x', 'y')], 0)], []), -('Module', [('ImportFrom', (1, 0, 1, 17), 'sys', [('alias', (1, 16, 1, 17), 'v', None)], 0)], []), -('Module', [('Global', (1, 0, 1, 8), ['v'])], []), -('Module', [('Expr', (1, 0, 1, 1), ('Constant', (1, 0, 1, 1), 1, None))], []), -('Module', [('Pass', (1, 0, 1, 4))], []), -('Module', [('For', (1, 0, 1, 16), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Break', (1, 11, 1, 16))], [], None)], []), -('Module', [('For', (1, 0, 1, 19), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Continue', (1, 11, 1, 19))], [], None)], []), -('Module', [('For', (1, 0, 1, 18), ('Tuple', (1, 4, 1, 7), [('Name', (1, 4, 1, 5), 'a', ('Store',)), ('Name', (1, 6, 1, 7), 'b', ('Store',))], ('Store',)), ('Name', (1, 11, 1, 12), 'c', ('Load',)), 
[('Pass', (1, 14, 1, 18))], [], None)], []), -('Module', [('For', (1, 0, 1, 20), ('Tuple', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), -('Module', [('For', (1, 0, 1, 20), ('List', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []), -('Module', [('Expr', (1, 0, 11, 5), ('GeneratorExp', (1, 0, 11, 5), ('Tuple', (2, 4, 6, 5), [('Name', (3, 4, 3, 6), 'Aa', ('Load',)), ('Name', (5, 7, 5, 9), 'Bb', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (8, 4, 10, 6), [('Name', (8, 4, 8, 6), 'Aa', ('Store',)), ('Name', (10, 4, 10, 6), 'Bb', ('Store',))], ('Store',)), ('Name', (10, 10, 10, 12), 'Cc', ('Load',)), [], 0)]))], []), -('Module', [('Expr', (1, 0, 1, 34), ('DictComp', (1, 0, 1, 34), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Name', (1, 11, 1, 12), 'w', ('Store',)), ('Name', (1, 16, 1, 17), 'x', ('Load',)), [], 0), ('comprehension', ('Name', (1, 22, 1, 23), 'm', ('Store',)), ('Name', (1, 27, 1, 28), 'p', ('Load',)), [('Name', (1, 32, 1, 33), 'g', ('Load',))], 0)]))], []), -('Module', [('Expr', (1, 0, 1, 20), ('DictComp', (1, 0, 1, 20), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'v', ('Store',)), ('Name', (1, 13, 1, 14), 'w', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'x', ('Load',)), [], 0)]))], []), -('Module', [('Expr', (1, 0, 1, 19), ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 12, 1, 13), 'x', ('Load',)), [('Name', (1, 17, 1, 18), 'g', ('Load',))], 0)]))], []), -('Module', [('Expr', (1, 0, 1, 16), ('SetComp', (1, 0, 1, 16), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Tuple', (1, 7, 1, 10), [('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 9, 1, 10), 'm', ('Store',))], ('Store',)), ('Name', (1, 14, 1, 15), 'x', ('Load',)), [], 0)]))], []), -('Module', [('AsyncFunctionDef', (1, 0, 3, 18), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 17), ('Constant', (2, 1, 2, 17), 'async function', None)), ('Expr', (3, 1, 3, 18), ('Await', (3, 1, 3, 18), ('Call', (3, 7, 3, 18), ('Name', (3, 7, 3, 16), 'something', ('Load',)), [], [])))], [], None, None, [])], []), -('Module', [('AsyncFunctionDef', (1, 0, 3, 8), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncFor', (2, 1, 3, 8), ('Name', (2, 11, 2, 12), 'e', ('Store',)), ('Name', (2, 16, 2, 17), 'i', ('Load',)), [('Expr', (2, 19, 2, 20), ('Constant', (2, 19, 2, 20), 1, None))], [('Expr', (3, 7, 3, 8), ('Constant', (3, 7, 3, 8), 2, None))], None)], [], None, None, [])], []), -('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncWith', (2, 1, 2, 21), [('withitem', ('Name', (2, 12, 2, 13), 'a', ('Load',)), ('Name', (2, 17, 2, 18), 'b', ('Store',)))], [('Expr', (2, 20, 2, 21), ('Constant', (2, 20, 2, 21), 1, None))], None)], [], None, None, [])], []), -('Module', [('Expr', (1, 0, 1, 14), ('Dict', (1, 0, 1, 14), [None, ('Constant', (1, 10, 1, 11), 2, None)], [('Dict', (1, 3, 1, 8), [('Constant', (1, 4, 1, 5), 1, None)], [('Constant', (1, 6, 1, 7), 2, None)]), ('Constant', (1, 12, 1, 
13), 3, None)]))], []), -('Module', [('Expr', (1, 0, 1, 12), ('Set', (1, 0, 1, 12), [('Starred', (1, 1, 1, 8), ('Set', (1, 2, 1, 8), [('Constant', (1, 3, 1, 4), 1, None), ('Constant', (1, 6, 1, 7), 2, None)]), ('Load',)), ('Constant', (1, 10, 1, 11), 3, None)]))], []), -('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 16), ('Yield', (1, 9, 1, 16), ('Constant', (1, 15, 1, 16), 1, None)))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 22), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 22), ('YieldFrom', (1, 9, 1, 22), ('List', (1, 20, 1, 22), [], ('Load',))))], [], None, None, [])], []), -('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 21), ('ListComp', (2, 1, 2, 21), ('Name', (2, 2, 2, 3), 'i', ('Load',)), [('comprehension', ('Name', (2, 14, 2, 15), 'b', ('Store',)), ('Name', (2, 19, 2, 20), 'c', ('Load',)), [], 1)]))], [], None, None, [])], []), -('Module', [('FunctionDef', (4, 0, 4, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), -('Module', [('AsyncFunctionDef', (4, 0, 4, 19), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 15, 4, 19))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []), -('Module', [('ClassDef', (4, 0, 4, 13), 'C', [], [], [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], [])], []), -('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Call', (1, 1, 1, 19), ('Name', (1, 1, 1, 5), 'deco', ('Load',)), [('GeneratorExp', (1, 5, 1, 19), ('Name', (1, 6, 1, 7), 'a', ('Load',)), [('comprehension', ('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 17, 1, 18), 'b', ('Load',)), [], 0)])], [])], None, None, [])], []), -('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Attribute', (1, 1, 1, 6), ('Attribute', (1, 1, 1, 4), ('Name', (1, 1, 1, 2), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',))], None, None, [])], []), -('Module', [('Expr', (1, 0, 1, 8), ('NamedExpr', (1, 1, 1, 7), ('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Constant', (1, 6, 1, 7), 1, None)))], []), -('Module', [('If', (1, 0, 1, 19), ('NamedExpr', (1, 3, 1, 13), ('Name', (1, 3, 1, 4), 'a', ('Store',)), ('Call', (1, 8, 1, 13), ('Name', (1, 8, 1, 11), 'foo', ('Load',)), [], [])), [('Pass', (1, 15, 1, 19))], [])], []), -('Module', [('While', (1, 0, 1, 22), ('NamedExpr', (1, 6, 1, 16), ('Name', (1, 6, 1, 7), 'a', ('Store',)), ('Call', (1, 11, 1, 16), ('Name', (1, 11, 1, 14), 'foo', ('Load',)), [], [])), [('Pass', (1, 18, 1, 22))], [])], []), -('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, []), 
[('Pass', (1, 14, 1, 18))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None), ('arg', (1, 15, 1, 16), 'd', None, None), ('arg', (1, 18, 1, 19), 'e', None, None)], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], None, []), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 39), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], ('arg', (1, 26, 1, 32), 'kwargs', None, None), []), [('Pass', (1, 35, 1, 39))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 20), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None)]), [('Pass', (1, 16, 1, 20))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None), ('arg', (1, 19, 1, 20), 'c', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None), ('Constant', (1, 21, 1, 22), 4, None)]), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 28, 1, 32))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 30), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 26, 1, 30))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 42), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], ('arg', (1, 29, 1, 35), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 38, 1, 42))], [], None, None, [])], []), -('Module', [('FunctionDef', (1, 0, 1, 40), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], ('arg', (1, 27, 1, 33), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 36, 1, 40))], [], None, None, [])], []), -('Module', [('TypeAlias', (1, 0, 1, 12), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [], ('Name', (1, 9, 1, 12), 'int', ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 15), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None)], ('Name', (1, 12, 1, 15), 'int', ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 32), 
('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None), ('TypeVarTuple', (1, 10, 1, 13), 'Ts', None), ('ParamSpec', (1, 15, 1, 18), 'P', None)], ('Tuple', (1, 22, 1, 32), [('Name', (1, 23, 1, 24), 'T', ('Load',)), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Name', (1, 30, 1, 31), 'P', ('Load',))], ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 37), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 13), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), None), ('TypeVarTuple', (1, 15, 1, 18), 'Ts', None), ('ParamSpec', (1, 20, 1, 23), 'P', None)], ('Tuple', (1, 27, 1, 37), [('Name', (1, 28, 1, 29), 'T', ('Load',)), ('Name', (1, 31, 1, 33), 'Ts', ('Load',)), ('Name', (1, 35, 1, 36), 'P', ('Load',))], ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 44), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 20), 'T', ('Tuple', (1, 10, 1, 20), [('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Name', (1, 16, 1, 19), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 22, 1, 25), 'Ts', None), ('ParamSpec', (1, 27, 1, 30), 'P', None)], ('Tuple', (1, 34, 1, 44), [('Name', (1, 35, 1, 36), 'T', ('Load',)), ('Name', (1, 38, 1, 40), 'Ts', ('Load',)), ('Name', (1, 42, 1, 43), 'P', ('Load',))], ('Load',)))], []), -('Module', [('TypeAlias', (1, 0, 1, 48), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 17), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Constant', (1, 16, 1, 17), 1, None)), ('TypeVarTuple', (1, 19, 1, 26), 'Ts', ('Constant', (1, 25, 1, 26), 2, None)), ('ParamSpec', (1, 28, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))], ('Tuple', (1, 38, 1, 48), [('Name', (1, 39, 1, 40), 'T', ('Load',)), ('Name', (1, 42, 1, 44), 'Ts', ('Load',)), ('Name', (1, 46, 1, 47), 'P', ('Load',))], ('Load',)))], []), -('Module', [('ClassDef', (1, 0, 1, 16), 'X', [], [], [('Pass', (1, 12, 1, 16))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None)])], []), -('Module', [('ClassDef', (1, 0, 1, 26), 'X', [], [], [('Pass', (1, 22, 1, 26))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None), ('TypeVarTuple', (1, 11, 1, 14), 'Ts', None), ('ParamSpec', (1, 16, 1, 19), 'P', None)])], []), -('Module', [('ClassDef', (1, 0, 1, 31), 'X', [], [], [('Pass', (1, 27, 1, 31))], [], [('TypeVar', (1, 8, 1, 14), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), None), ('TypeVarTuple', (1, 16, 1, 19), 'Ts', None), ('ParamSpec', (1, 21, 1, 24), 'P', None)])], []), -('Module', [('ClassDef', (1, 0, 1, 38), 'X', [], [], [('Pass', (1, 34, 1, 38))], [], [('TypeVar', (1, 8, 1, 21), 'T', ('Tuple', (1, 11, 1, 21), [('Name', (1, 12, 1, 15), 'int', ('Load',)), ('Name', (1, 17, 1, 20), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 23, 1, 26), 'Ts', None), ('ParamSpec', (1, 28, 1, 31), 'P', None)])], []), -('Module', [('ClassDef', (1, 0, 1, 43), 'X', [], [], [('Pass', (1, 39, 1, 43))], [], [('TypeVar', (1, 8, 1, 18), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Constant', (1, 17, 1, 18), 1, None)), ('TypeVarTuple', (1, 20, 1, 27), 'Ts', ('Constant', (1, 26, 1, 27), 2, None)), ('ParamSpec', (1, 29, 1, 36), 'P', ('Constant', (1, 35, 1, 36), 3, None))])], []), -('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 12, 1, 16))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None)])], []), -('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None), 
('TypeVarTuple', (1, 9, 1, 12), 'Ts', None), ('ParamSpec', (1, 14, 1, 17), 'P', None)])], []), -('Module', [('FunctionDef', (1, 0, 1, 31), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 27, 1, 31))], [], None, None, [('TypeVar', (1, 6, 1, 12), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), None), ('TypeVarTuple', (1, 14, 1, 17), 'Ts', None), ('ParamSpec', (1, 19, 1, 22), 'P', None)])], []), -('Module', [('FunctionDef', (1, 0, 1, 38), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 34, 1, 38))], [], None, None, [('TypeVar', (1, 6, 1, 19), 'T', ('Tuple', (1, 9, 1, 19), [('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Name', (1, 15, 1, 18), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 21, 1, 24), 'Ts', None), ('ParamSpec', (1, 26, 1, 29), 'P', None)])], []), -('Module', [('FunctionDef', (1, 0, 1, 43), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 39, 1, 43))], [], None, None, [('TypeVar', (1, 6, 1, 16), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Constant', (1, 15, 1, 16), 1, None)), ('TypeVarTuple', (1, 18, 1, 25), 'Ts', ('Constant', (1, 24, 1, 25), 2, None)), ('ParamSpec', (1, 27, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))])], []), -('Module', [('Match', (1, 0, 3, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))])])], []), -('Module', [('Match', (1, 0, 5, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))]), ('match_case', ('MatchAs', (4, 6, 4, 7), None, None), None, [('Pass', (5, 2, 5, 6))])])], []), -] -single_results = [ -('Interactive', [('Expr', (1, 0, 1, 3), ('BinOp', (1, 0, 1, 3), ('Constant', (1, 0, 1, 1), 1, None), ('Add',), ('Constant', (1, 2, 1, 3), 2, None)))]), -] -eval_results = [ -('Expression', ('Constant', (1, 0, 1, 4), None, None)), -('Expression', ('Constant', (1, 0, 1, 4), True, None)), -('Expression', ('Constant', (1, 0, 1, 5), False, None)), -('Expression', ('BoolOp', (1, 0, 1, 7), ('And',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 6, 1, 7), 'b', ('Load',))])), -('Expression', ('BoolOp', (1, 0, 1, 6), ('Or',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',))])), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Add',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Sub',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Div',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('MatMult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('FloorDiv',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Pow',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mod',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('RShift',), ('Name', (1, 5, 1, 6), 'b', 
('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('LShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitXor',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitOr',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitAnd',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))), -('Expression', ('UnaryOp', (1, 0, 1, 5), ('Not',), ('Name', (1, 4, 1, 5), 'v', ('Load',)))), -('Expression', ('UnaryOp', (1, 0, 1, 2), ('UAdd',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), -('Expression', ('UnaryOp', (1, 0, 1, 2), ('USub',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), -('Expression', ('UnaryOp', (1, 0, 1, 2), ('Invert',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))), -('Expression', ('Lambda', (1, 0, 1, 11), ('arguments', [], [], None, [], [], None, []), ('Constant', (1, 7, 1, 11), None, None))), -('Expression', ('Dict', (1, 0, 1, 7), [('Constant', (1, 2, 1, 3), 1, None)], [('Constant', (1, 4, 1, 5), 2, None)])), -('Expression', ('Dict', (1, 0, 1, 2), [], [])), -('Expression', ('Set', (1, 0, 1, 7), [('Constant', (1, 1, 1, 5), None, None)])), -('Expression', ('Dict', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None)], [('Constant', (4, 10, 4, 11), 2, None)])), -('Expression', ('List', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)], ('Load',))), -('Expression', ('Tuple', (1, 0, 4, 6), [('Constant', (2, 6, 2, 7), 1, None)], ('Load',))), -('Expression', ('Set', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)])), -('Expression', ('ListComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), -('Expression', ('GeneratorExp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), -('Expression', ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])), -('Expression', ('DictComp', (1, 0, 1, 25), ('Name', (1, 1, 1, 2), 'k', ('Load',)), ('Name', (1, 4, 1, 5), 'v', ('Load',)), [('comprehension', ('Tuple', (1, 10, 1, 14), [('Name', (1, 10, 1, 11), 'k', ('Store',)), ('Name', (1, 13, 1, 14), 'v', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [('Name', (1, 23, 1, 24), 'd', ('Load',))], 0)])), -('Expression', ('ListComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', 
('Load',)), [], 0)])), -('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('SetComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('GeneratorExp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])), -('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])), -('Expression', ('Compare', (1, 0, 1, 9), ('Constant', (1, 0, 1, 1), 1, None), [('Lt',), ('Lt',)], [('Constant', (1, 4, 1, 5), 2, None), ('Constant', (1, 8, 1, 9), 3, None)])), -('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Eq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), -('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('LtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), -('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('GtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), -('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotEq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), -('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Is',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), -('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('IsNot',)], [('Name', (1, 9, 1, 10), 'b', 
('Load',))])), -('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('In',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])), -('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotIn',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])), -('Expression', ('Call', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [], [])), -('Expression', ('Call', (1, 0, 1, 17), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Constant', (1, 2, 1, 3), 1, None), ('Constant', (1, 4, 1, 5), 2, None), ('Starred', (1, 10, 1, 12), ('Name', (1, 11, 1, 12), 'd', ('Load',)), ('Load',))], [('keyword', (1, 6, 1, 9), 'c', ('Constant', (1, 8, 1, 9), 3, None)), ('keyword', (1, 13, 1, 16), None, ('Name', (1, 15, 1, 16), 'e', ('Load',)))])), -('Expression', ('Call', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Starred', (1, 2, 1, 9), ('List', (1, 3, 1, 9), [('Constant', (1, 4, 1, 5), 0, None), ('Constant', (1, 7, 1, 8), 1, None)], ('Load',)), ('Load',))], [])), -('Expression', ('Call', (1, 0, 1, 15), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('GeneratorExp', (1, 1, 1, 15), ('Name', (1, 2, 1, 3), 'a', ('Load',)), [('comprehension', ('Name', (1, 8, 1, 9), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Load',)), [], 0)])], [])), -('Expression', ('Constant', (1, 0, 1, 2), 10, None)), -('Expression', ('Constant', (1, 0, 1, 2), 1j, None)), -('Expression', ('Constant', (1, 0, 1, 8), 'string', None)), -('Expression', ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',))), -('Expression', ('Subscript', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Slice', (1, 2, 1, 5), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Name', (1, 4, 1, 5), 'c', ('Load',)), None), ('Load',))), -('Expression', ('Name', (1, 0, 1, 1), 'v', ('Load',))), -('Expression', ('List', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), -('Expression', ('List', (1, 0, 1, 2), [], ('Load',))), -('Expression', ('Tuple', (1, 0, 1, 5), [('Constant', (1, 0, 1, 1), 1, None), ('Constant', (1, 2, 1, 3), 2, None), ('Constant', (1, 4, 1, 5), 3, None)], ('Load',))), -('Expression', ('Tuple', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))), -('Expression', ('Tuple', (1, 0, 1, 2), [], ('Load',))), -('Expression', ('Call', (1, 0, 1, 17), ('Attribute', (1, 0, 1, 7), ('Attribute', (1, 0, 1, 5), ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8, 1, 16), ('Attribute', (1, 8, 1, 11), ('Name', (1, 8, 1, 9), 'a', ('Load',)), 'b', ('Load',)), ('Slice', (1, 12, 1, 15), ('Constant', (1, 12, 1, 13), 1, None), ('Constant', (1, 14, 1, 15), 2, None), None), ('Load',))], [])), -('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), ('Constant', (1, 4, 1, 5), 1, None), None, None), ('Load',))), -('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), None, ('Constant', (1, 5, 1, 6), 1, None), None), ('Load',))), -('Expression', ('Subscript', (1, 0, 1, 8), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 7), None, None, ('Constant', (1, 6, 1, 7), 1, None)), ('Load',))), -('Expression', ('Subscript', (1, 0, 1, 10), 
('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 9), ('Constant', (1, 4, 1, 5), 1, None), ('Constant', (1, 6, 1, 7), 1, None), ('Constant', (1, 8, 1, 9), 1, None)), ('Load',))), -('Expression', ('IfExp', (1, 0, 1, 21), ('Name', (1, 9, 1, 10), 'x', ('Load',)), ('Call', (1, 0, 1, 5), ('Name', (1, 0, 1, 3), 'foo', ('Load',)), [], []), ('Call', (1, 16, 1, 21), ('Name', (1, 16, 1, 19), 'bar', ('Load',)), [], []))), -('Expression', ('JoinedStr', (1, 0, 1, 6), [('FormattedValue', (1, 2, 1, 5), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, None)])), -('Expression', ('JoinedStr', (1, 0, 1, 10), [('FormattedValue', (1, 2, 1, 9), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, ('JoinedStr', (1, 4, 1, 8), [('Constant', (1, 5, 1, 8), '.2f', None)]))])), -('Expression', ('JoinedStr', (1, 0, 1, 8), [('FormattedValue', (1, 2, 1, 7), ('Name', (1, 3, 1, 4), 'a', ('Load',)), 114, None)])), -('Expression', ('JoinedStr', (1, 0, 1, 11), [('Constant', (1, 2, 1, 6), 'foo(', None), ('FormattedValue', (1, 6, 1, 9), ('Name', (1, 7, 1, 8), 'a', ('Load',)), -1, None), ('Constant', (1, 9, 1, 10), ')', None)])), -] -main() diff --git a/Lib/test/test_ast/utils.py b/Lib/test/test_ast/utils.py new file mode 100644 index 00000000000000..145e89ee94e935 --- /dev/null +++ b/Lib/test/test_ast/utils.py @@ -0,0 +1,15 @@ +def to_tuple(t): + if t is None or isinstance(t, (str, int, complex, float, bytes)) or t is Ellipsis: + return t + elif isinstance(t, list): + return [to_tuple(e) for e in t] + result = [t.__class__.__name__] + if hasattr(t, 'lineno') and hasattr(t, 'col_offset'): + result.append((t.lineno, t.col_offset)) + if hasattr(t, 'end_lineno') and hasattr(t, 'end_col_offset'): + result[-1] += (t.end_lineno, t.end_col_offset) + if t._fields is None: + return tuple(result) + for f in t._fields: + result.append(to_tuple(getattr(t, f))) + return tuple(result) diff --git a/Makefile.pre.in b/Makefile.pre.in index 9ea7bc49be316c..5608e593ac9aca 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2366,6 +2366,7 @@ LIBSUBDIRS= asyncio \ __phello__ TESTSUBDIRS= idlelib/idle_test \ test \ + test/test_ast \ test/archivetestdata \ test/audiodata \ test/certdata \ From 89fa05fdce20cc0a19689eb365f1828b086d0b17 Mon Sep 17 00:00:00 2001 From: Petr Viktorin Date: Mon, 29 Jul 2024 18:10:25 +0200 Subject: [PATCH 10/70] gh-122234: Add DECREFs to error paths (#122406) Co-Authored-By: Kirill Podoprigora --- Python/bltinmodule.c | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index ae025e767ec838..99ed06972be98e 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2694,6 +2694,8 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) continue; } else { + Py_DECREF(item); + Py_DECREF(iter); return NULL; } } @@ -2745,6 +2747,8 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) continue; } else { + Py_DECREF(item); + Py_DECREF(iter); return NULL; } } From 490e0ad83ac72c5688dfbbab4eac61ccfd7be5fd Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 29 Jul 2024 10:23:23 -0600 Subject: [PATCH 11/70] gh-117482: Fix the Slot Wrapper Inheritance Tests (gh-122248) The tests were only checking cases where the slot wrapper was present in the initial case. They were missing when the slot wrapper was added in the additional initializations. This fixes that. 
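For context, a "slot wrapper" here is a method implemented in C and exposed on a built-in type through a wrapper descriptor. A minimal, illustrative sketch (not part of the diff below) of the kind of attribute the new support helpers are built around; the actual filtering in iter_slot_wrappers() is somewhat stricter:

    import types

    # Methods implemented in C show up as wrapper descriptors
    # ("slot wrappers") rather than plain Python functions.
    assert isinstance(object.__repr__, types.WrapperDescriptorType)

    # Roughly the set of slot-wrapper names on a builtin class:
    slot_wrappers = {name for name, value in vars(int).items()
                     if isinstance(value, types.WrapperDescriptorType)}
    print(sorted(slot_wrappers))  # e.g. '__abs__', '__add__', ...

Comparing these sets across repeated interpreter initializations is what lets the tests catch a slot wrapper that only appears after the first initialization.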
--- Lib/test/support/__init__.py | 55 ++++++++++++++++++++++++++++++ Lib/test/test_embed.py | 57 ++++++++++++++++++++----------- Lib/test/test_types.py | 65 +++++++++++++++++++++--------------- Programs/_testembed.c | 14 ++++++-- 4 files changed, 143 insertions(+), 48 deletions(-) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 37e3305036f499..f4dce793ff1acb 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -2608,6 +2608,61 @@ def copy_python_src_ignore(path, names): return ignored +def iter_builtin_types(): + for obj in __builtins__.values(): + if not isinstance(obj, type): + continue + cls = obj + if cls.__module__ != 'builtins': + continue + yield cls + + +def iter_slot_wrappers(cls): + assert cls.__module__ == 'builtins', cls + + def is_slot_wrapper(name, value): + if not isinstance(value, types.WrapperDescriptorType): + assert not repr(value).startswith(' 3 + ? main_argc - 2 + : INIT_LOOPS; - for (int i=1; i <= INIT_LOOPS; i++) { - fprintf(stderr, "--- Loop #%d ---\n", i); + for (int i=0; i < loops; i++) { + fprintf(stderr, "--- Loop #%d ---\n", i+1); fflush(stderr); + if (main_argc > 3) { + code = main_argv[i+2]; + } + _testembed_Py_InitializeFromConfig(); int err = PyRun_SimpleString(code); Py_Finalize(); From 68840e91ac6689d3954b98a9ab136e194b5250b8 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Mon, 29 Jul 2024 21:52:48 +0300 Subject: [PATCH 12/70] gh-122311: Fix a refleak in pickle (GH-122411) --- Modules/_pickle.c | 1 + 1 file changed, 1 insertion(+) diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 452b4aff0237ca..50c73dca0db281 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -3123,6 +3123,7 @@ batch_dict(PickleState *state, PicklerObject *self, PyObject *iter) if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) { PyErr_SetString(PyExc_TypeError, "dict items " "iterator must return 2-tuples"); + Py_DECREF(obj); return -1; } i = save(state, self, PyTuple_GET_ITEM(obj, 0), 0); From 15d4cd096758ca089c6bd6ed808c34cca676d9bb Mon Sep 17 00:00:00 2001 From: Brandt Bucher Date: Mon, 29 Jul 2024 12:17:47 -0700 Subject: [PATCH 13/70] GH-116090: Fire RAISE events from _FOR_ITER_TIER_TWO (GH-122413) --- Include/internal/pycore_ceval.h | 1 + .../2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst | 2 ++ Python/bytecodes.c | 7 ++++--- Python/ceval.c | 9 +++------ Python/executor_cases.c.h | 1 + Python/generated_cases.c.h | 6 +++--- 6 files changed, 14 insertions(+), 12 deletions(-) create mode 100644 Misc/NEWS.d/next/Core_and_Builtins/2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index fac4a4d228053e..4fdee9fdf2a1ff 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -263,6 +263,7 @@ PyAPI_FUNC(PyObject *) _PyEval_ImportFrom(PyThreadState *, PyObject *, PyObject PyAPI_FUNC(PyObject *) _PyEval_ImportName(PyThreadState *, _PyInterpreterFrame *, PyObject *, PyObject *, PyObject *); PyAPI_FUNC(PyObject *)_PyEval_MatchClass(PyThreadState *tstate, PyObject *subject, PyObject *type, Py_ssize_t nargs, PyObject *kwargs); PyAPI_FUNC(PyObject *)_PyEval_MatchKeys(PyThreadState *tstate, PyObject *map, PyObject *keys); +PyAPI_FUNC(void) _PyEval_MonitorRaise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); PyAPI_FUNC(int) _PyEval_UnpackIterableStackRef(PyThreadState *tstate, _PyStackRef v, int argcnt, int argcntafter, _PyStackRef *sp); PyAPI_FUNC(void) _PyEval_FrameClearAndPop(PyThreadState 
*tstate, _PyInterpreterFrame *frame); PyAPI_FUNC(PyObject **) _PyObjectArray_FromStackRefArray(_PyStackRef *input, Py_ssize_t nargs, PyObject **scratch); diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst new file mode 100644 index 00000000000000..6efb620961f498 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst @@ -0,0 +1,2 @@ +Fix an issue in JIT builds that prevented some :keyword:`for` loops from +correctly firing :monitoring-event:`RAISE` monitoring events. diff --git a/Python/bytecodes.c b/Python/bytecodes.c index d74f2aae0483ce..4afce2cc3bea9d 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1124,7 +1124,7 @@ dummy_func( if (retval_o == NULL) { if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration) ) { - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); } if (_PyGen_FetchStopIterationValue(&retval_o) == 0) { assert(retval_o != NULL); @@ -2824,7 +2824,7 @@ dummy_func( if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { ERROR_NO_POP(); } - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -2849,6 +2849,7 @@ dummy_func( if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { ERROR_NO_POP(); } + _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -2875,7 +2876,7 @@ dummy_func( if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { ERROR_NO_POP(); } - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); _PyErr_Clear(tstate); } /* iterator ended normally */ diff --git a/Python/ceval.c b/Python/ceval.c index c0074c45b27111..425a2a01bea8ed 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -225,9 +225,6 @@ maybe_lltrace_resume_frame(_PyInterpreterFrame *frame, _PyInterpreterFrame *skip #endif -static void monitor_raise(PyThreadState *tstate, - _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr); static void monitor_reraise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); @@ -884,7 +881,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int PyTraceBack_Here(f); } } - monitor_raise(tstate, frame, next_instr-1); + _PyEval_MonitorRaise(tstate, frame, next_instr-1); exception_unwind: { /* We can't use frame->instr_ptr here, as RERAISE may have set it */ @@ -2200,8 +2197,8 @@ no_tools_for_local_event(PyThreadState *tstate, _PyInterpreterFrame *frame, int } } -static void -monitor_raise(PyThreadState *tstate, _PyInterpreterFrame *frame, +void +_PyEval_MonitorRaise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr) { if (no_tools_for_global_event(tstate, PY_MONITORING_EVENT_RAISE)) { diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 6e3f6cc62fe11f..62654035e80f50 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -3173,6 +3173,7 @@ if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { JUMP_TO_ERROR(); } + _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr); _PyErr_Clear(tstate); } /* iterator ended normally */ diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 76d1cc7ad6cf95..3c643f637ab095 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -3063,7 +3063,7 @@ if 
(!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { goto error; } - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -3731,7 +3731,7 @@ if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { goto error; } - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); _PyErr_Clear(tstate); } /* iterator ended normally */ @@ -6026,7 +6026,7 @@ if (retval_o == NULL) { if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration) ) { - monitor_raise(tstate, frame, this_instr); + _PyEval_MonitorRaise(tstate, frame, this_instr); } if (_PyGen_FetchStopIterationValue(&retval_o) == 0) { assert(retval_o != NULL); From 046670c3a0480560b6bfa06727fd7aa6a1798614 Mon Sep 17 00:00:00 2001 From: Donghee Na Date: Tue, 30 Jul 2024 04:20:36 +0900 Subject: [PATCH 14/70] gh-121996: Fix --disable-safety and --enable-slower-safety options (gh-122414) --- configure | 19 +++++++++++++------ configure.ac | 11 ++++++----- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/configure b/configure index 52988f77f6d926..39ab48fa4e2526 100755 --- a/configure +++ b/configure @@ -9682,10 +9682,10 @@ then : then : disable_safety=no else $as_nop - disable_saftey=yes + disable_safety=yes fi else $as_nop - disable_saftey=no + disable_safety=no fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $disable_safety" >&5 @@ -9726,7 +9726,7 @@ fi printf "%s\n" "$ax_cv_check_cflags__Werror__fstack_protector_strong" >&6; } if test "x$ax_cv_check_cflags__Werror__fstack_protector_strong" = xyes then : - BASECFLAGS="$BASECFLAGS -fstack-protector-strong" + CFLAGS_NODIST="$CFLAGS_NODIST -fstack-protector-strong" else $as_nop { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: -fstack-protector-strong not supported" >&5 printf "%s\n" "$as_me: WARNING: -fstack-protector-strong not supported" >&2;} @@ -9765,7 +9765,7 @@ fi printf "%s\n" "$ax_cv_check_cflags__Werror__Wtrampolines" >&6; } if test "x$ax_cv_check_cflags__Werror__Wtrampolines" = xyes then : - BASECFLAGS="$BASECFLAGS -Wtrampolines" + CFLAGS_NODIST="$CFLAGS_NODIST -Wtrampolines" else $as_nop { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: -Wtrampolines not supported" >&5 printf "%s\n" "$as_me: WARNING: -Wtrampolines not supported" >&2;} @@ -9778,7 +9778,14 @@ printf %s "checking for --enable-slower-safety... " >&6; } # Check whether --enable-slower-safety was given. 
if test ${enable_slower_safety+y} then : - enableval=$enable_slower_safety; + enableval=$enable_slower_safety; if test "x$disable_slower_safety" = xyes +then : + enable_slower_safety=no +else $as_nop + enable_slower_safety=yes +fi +else $as_nop + enable_slower_safety=no fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $enable_slower_safety" >&5 @@ -9819,7 +9826,7 @@ fi printf "%s\n" "$ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3" >&6; } if test "x$ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3" = xyes then : - BASECFLAGS="$BASECFLAGS -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3" + CFLAGS_NODIST="$CFLAGS_NODIST -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3" else $as_nop { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: -D_FORTIFY_SOURCE=3 not supported" >&5 printf "%s\n" "$as_me: WARNING: -D_FORTIFY_SOURCE=3 not supported" >&2;} diff --git a/configure.ac b/configure.ac index 5bde6803cd5a7b..62ed812991fc4e 100644 --- a/configure.ac +++ b/configure.ac @@ -2503,23 +2503,24 @@ AS_VAR_IF([with_strict_overflow], [yes], AC_MSG_CHECKING([for --disable-safety]) AC_ARG_ENABLE([safety], [AS_HELP_STRING([--disable-safety], [disable usage of the security compiler options with no performance overhead])], - [AS_VAR_IF([enable_safety], [yes], [disable_safety=no], [disable_saftey=yes])], [disable_saftey=no]) + [AS_VAR_IF([enable_safety], [yes], [disable_safety=no], [disable_safety=yes])], [disable_safety=no]) AC_MSG_RESULT([$disable_safety]) if test "$disable_safety" = "no" then - AX_CHECK_COMPILE_FLAG([-fstack-protector-strong], [BASECFLAGS="$BASECFLAGS -fstack-protector-strong"], [AC_MSG_WARN([-fstack-protector-strong not supported])], [-Werror]) - AX_CHECK_COMPILE_FLAG([-Wtrampolines], [BASECFLAGS="$BASECFLAGS -Wtrampolines"], [AC_MSG_WARN([-Wtrampolines not supported])], [-Werror]) + AX_CHECK_COMPILE_FLAG([-fstack-protector-strong], [CFLAGS_NODIST="$CFLAGS_NODIST -fstack-protector-strong"], [AC_MSG_WARN([-fstack-protector-strong not supported])], [-Werror]) + AX_CHECK_COMPILE_FLAG([-Wtrampolines], [CFLAGS_NODIST="$CFLAGS_NODIST -Wtrampolines"], [AC_MSG_WARN([-Wtrampolines not supported])], [-Werror]) fi AC_MSG_CHECKING([for --enable-slower-safety]) AC_ARG_ENABLE([slower-safety], - [AS_HELP_STRING([--enable-slower-safety], [enable usage of the security compiler options with performance overhead])],[]) + [AS_HELP_STRING([--enable-slower-safety], [enable usage of the security compiler options with performance overhead])], + [AS_VAR_IF([disable_slower_safety], [yes], [enable_slower_safety=no], [enable_slower_safety=yes])], [enable_slower_safety=no]) AC_MSG_RESULT([$enable_slower_safety]) if test "$enable_slower_safety" = "yes" then - AX_CHECK_COMPILE_FLAG([-D_FORTIFY_SOURCE=3], [BASECFLAGS="$BASECFLAGS -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"], [AC_MSG_WARN([-D_FORTIFY_SOURCE=3 not supported])], [-Werror]) + AX_CHECK_COMPILE_FLAG([-D_FORTIFY_SOURCE=3], [CFLAGS_NODIST="$CFLAGS_NODIST -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"], [AC_MSG_WARN([-D_FORTIFY_SOURCE=3 not supported])], [-Werror]) fi case $GCC in From 76bdfa4cd02532519fb43ae91244e2b4b3650d78 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 29 Jul 2024 22:20:40 +0100 Subject: [PATCH 15/70] GH-122085: Use include files for C API deprecations (#109843) --- .../c-api-pending-removal-in-3.14.rst | 46 ++++++++ .../c-api-pending-removal-in-3.15.rst | 20 ++++ .../c-api-pending-removal-in-future.rst | 31 ++++++ Doc/deprecations/index.rst | 9 +- Doc/whatsnew/3.12.rst | 87 +-------------- 
Doc/whatsnew/3.13.rst | 103 ++---------------- Doc/whatsnew/3.14.rst | 6 + 7 files changed, 121 insertions(+), 181 deletions(-) create mode 100644 Doc/deprecations/c-api-pending-removal-in-3.14.rst create mode 100644 Doc/deprecations/c-api-pending-removal-in-3.15.rst create mode 100644 Doc/deprecations/c-api-pending-removal-in-future.rst diff --git a/Doc/deprecations/c-api-pending-removal-in-3.14.rst b/Doc/deprecations/c-api-pending-removal-in-3.14.rst new file mode 100644 index 00000000000000..369892a75b16eb --- /dev/null +++ b/Doc/deprecations/c-api-pending-removal-in-3.14.rst @@ -0,0 +1,46 @@ +Pending Removal in Python 3.14 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* The ``ma_version_tag`` field in :c:type:`PyDictObject` for extension modules + (:pep:`699`; :gh:`101193`). + +* Creating :c:data:`immutable types ` with mutable + bases (:gh:`95388`). + +* Functions to configure Python's initialization, deprecated in Python 3.11: + + * ``PySys_SetArgvEx()``: set :c:member:`PyConfig.argv` instead. + * ``PySys_SetArgv()``: set :c:member:`PyConfig.argv` instead. + * ``Py_SetProgramName()``: set :c:member:`PyConfig.program_name` instead. + * ``Py_SetPythonHome()``: set :c:member:`PyConfig.home` instead. + + The :c:func:`Py_InitializeFromConfig` API should be used with + :c:type:`PyConfig` instead. + +* Global configuration variables: + + * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug` instead. + * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose` instead. + * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet` instead. + * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive` instead. + * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect` instead. + * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level` instead. + * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import` instead. + * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning` instead. + * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings` instead. + * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment` instead. + * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode` instead. + * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory` instead. + * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio` instead. + * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed` + and :c:member:`PyConfig.hash_seed` instead. + * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` instead. + * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` instead. + * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` instead. + * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` instead. + * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` instead. + * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` instead. + * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` instead. (see :c:func:`Py_PreInitialize`) + + The :c:func:`Py_InitializeFromConfig` API should be used with + :c:type:`PyConfig` instead. 
diff --git a/Doc/deprecations/c-api-pending-removal-in-3.15.rst b/Doc/deprecations/c-api-pending-removal-in-3.15.rst new file mode 100644 index 00000000000000..c676927ed69212 --- /dev/null +++ b/Doc/deprecations/c-api-pending-removal-in-3.15.rst @@ -0,0 +1,20 @@ +Pending Removal in Python 3.15 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* The bundled copy of ``libmpdecimal``. +* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule` instead. +* :c:func:`PyWeakref_GET_OBJECT`: use :c:func:`PyWeakref_GetRef` instead. +* :c:func:`PyWeakref_GetObject`: use :c:func:`PyWeakref_GetRef` instead. +* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` instead. +* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` instead. +* Python initialization functions: + + * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and + :data:`!warnings.filters` instead. + * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` instead. + * :c:func:`Py_GetPath`: get :data:`sys.path` instead. + * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` instead. + * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` instead. + * :c:func:`Py_GetProgramName`: get :data:`sys.executable` instead. + * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or + the :envvar:`PYTHONHOME` environment variable instead. diff --git a/Doc/deprecations/c-api-pending-removal-in-future.rst b/Doc/deprecations/c-api-pending-removal-in-future.rst new file mode 100644 index 00000000000000..f646be45c8a770 --- /dev/null +++ b/Doc/deprecations/c-api-pending-removal-in-future.rst @@ -0,0 +1,31 @@ +Pending Removal in Future Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The following APIs are deprecated and will be removed, +although there is currently no date scheduled for their removal. + +* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: unneeded since Python 3.8. +* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException` instead. +* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException` instead. +* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException` instead. +* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject` instead. +* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child` instead. +* :c:func:`PySlice_GetIndicesEx`: use :c:func:`PySlice_Unpack` and :c:func:`PySlice_AdjustIndices` instead. +* :c:func:`!PyUnicode_AsDecodedObject`: use :c:func:`PyCodec_Decode` instead. +* :c:func:`!PyUnicode_AsDecodedUnicode`: use :c:func:`PyCodec_Decode` instead. +* :c:func:`!PyUnicode_AsEncodedObject`: use :c:func:`PyCodec_Encode` instead. +* :c:func:`!PyUnicode_AsEncodedUnicode`: use :c:func:`PyCodec_Encode` instead. +* :c:func:`PyUnicode_READY`: unneeded since Python 3.12 +* :c:func:`!PyErr_Display`: use :c:func:`PyErr_DisplayException` instead. +* :c:func:`!_PyErr_ChainExceptions`: use ``_PyErr_ChainExceptions1`` instead. +* :c:member:`!PyBytesObject.ob_shash` member: + call :c:func:`PyObject_Hash` instead. +* :c:member:`!PyDictObject.ma_version_tag` member. +* Thread Local Storage (TLS) API: + + * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc` instead. + * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free` instead. + * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set` instead. + * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get` instead. + * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete` instead. + * :c:func:`PyThread_ReInitTLS`: unneeded since Python 3.7. 
diff --git a/Doc/deprecations/index.rst b/Doc/deprecations/index.rst index cfb30dd09aef6f..a9efb0bc744335 100644 --- a/Doc/deprecations/index.rst +++ b/Doc/deprecations/index.rst @@ -1,10 +1,15 @@ Deprecations ============ -.. include:: pending-removal-in-3.14.rst - .. include:: pending-removal-in-3.15.rst .. include:: pending-removal-in-3.16.rst .. include:: pending-removal-in-future.rst + +C API Deprecations +------------------ + +.. include:: c-api-pending-removal-in-3.15.rst + +.. include:: c-api-pending-removal-in-future.rst diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index fc2b6519fb1307..3821ee3648e909 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -2210,92 +2210,13 @@ Deprecated overrides :c:member:`~PyTypeObject.tp_new` is deprecated. Call the metaclass instead. -Pending Removal in Python 3.14 -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. Add deprecations above alphabetically, not here at the end. -* The ``ma_version_tag`` field in :c:type:`PyDictObject` for extension modules - (:pep:`699`; :gh:`101193`). +.. include:: ../deprecations/c-api-pending-removal-in-3.14.rst -* Global configuration variables: +.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst - * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug` - * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose` - * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet` - * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive` - * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect` - * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level` - * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import` - * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning` - * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings` - * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment` - * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode` - * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory` - * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio` - * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed` - and :c:member:`PyConfig.hash_seed` - * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` - * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` - * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` - * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` - * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`) - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. - -* Creating :c:data:`immutable types ` with mutable - bases (:gh:`95388`). 
- -Pending Removal in Python 3.15 -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule` -* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` -* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` -* Python initialization functions: - - * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and - :data:`!warnings.filters` - * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` - * :c:func:`Py_GetPath`: get :data:`sys.path` - * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` - * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` - * :c:func:`Py_GetProgramName`: get :data:`sys.executable` - * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or - the :envvar:`PYTHONHOME` environment variable - -Pending Removal in Future Versions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The following APIs are deprecated and will be removed, -although there is currently no date scheduled for their removal. - -* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: unneeded since Python 3.8 -* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException` -* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException` -* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException` -* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject` -* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child` -* :c:func:`PySlice_GetIndicesEx`: use :c:func:`PySlice_Unpack` and :c:func:`PySlice_AdjustIndices` -* :c:func:`!PyUnicode_AsDecodedObject`: use :c:func:`PyCodec_Decode` -* :c:func:`!PyUnicode_AsDecodedUnicode`: use :c:func:`PyCodec_Decode` -* :c:func:`!PyUnicode_AsEncodedObject`: use :c:func:`PyCodec_Encode` -* :c:func:`!PyUnicode_AsEncodedUnicode`: use :c:func:`PyCodec_Encode` -* :c:func:`PyUnicode_READY`: unneeded since Python 3.12 -* :c:func:`!PyErr_Display`: use :c:func:`PyErr_DisplayException` -* :c:func:`!_PyErr_ChainExceptions`: use ``_PyErr_ChainExceptions1`` -* :c:member:`!PyBytesObject.ob_shash` member: - call :c:func:`PyObject_Hash` instead -* :c:member:`!PyDictObject.ma_version_tag` member -* Thread Local Storage (TLS) API: - - * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc` - * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free` - * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set` - * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get` - * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete` - * :c:func:`PyThread_ReInitTLS`: unneeded since Python 3.7 +.. include:: ../deprecations/c-api-pending-removal-in-future.rst Removed ------- diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index e89abfdd292f48..0854631c832ef4 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -2208,6 +2208,9 @@ Removed C APIs be used instead. (Contributed by Serhiy Storchaka in :gh:`86493`.) +* Remove undocumented ``PY_TIMEOUT_MAX`` constant from the limited C API. + (Contributed by Victor Stinner in :gh:`110014`.) + Deprecated C APIs ----------------- @@ -2249,105 +2252,13 @@ Deprecated C APIs Refer to the deprecation notices on each function for their recommended replacements. (Soft deprecated as part of :pep:`667`.) -Pending Removal in Python 3.14 ------------------------------- - -* Creating immutable types (:c:macro:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable - bases using the C API. 
- -* Functions to configure the Python initialization, deprecated in Python 3.11: - - * ``PySys_SetArgvEx()``: set :c:member:`PyConfig.argv` instead. - * ``PySys_SetArgv()``: set :c:member:`PyConfig.argv` instead. - * ``Py_SetProgramName()``: set :c:member:`PyConfig.program_name` instead. - * ``Py_SetPythonHome()``: set :c:member:`PyConfig.home` instead. - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. - -* Global configuration variables: - - * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug` - * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose` - * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet` - * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive` - * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect` - * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level` - * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import` - * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning` - * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings` - * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment` - * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode` - * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory` - * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio` - * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed` - and :c:member:`PyConfig.hash_seed` - * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` - * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` - * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` - * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` - * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` - * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`) - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. - -Pending Removal in Python 3.15 ------------------------------- - -* The bundled copy of ``libmpdecimal``. -* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule`. -* :c:func:`PyWeakref_GET_OBJECT`: use :c:func:`PyWeakref_GetRef` instead. -* :c:func:`PyWeakref_GetObject`: use :c:func:`PyWeakref_GetRef` instead. -* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` instead. -* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` instead. -* Python initialization functions: - - * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and - :data:`!warnings.filters` instead. - * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` instead. - * :c:func:`Py_GetPath`: get :data:`sys.path` instead. - * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` instead. - * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` instead. - * :c:func:`Py_GetProgramName`: get :data:`sys.executable` instead. - * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or - :envvar:`PYTHONHOME` environment variable instead. +.. Add deprecations above alphabetically, not here at the end. 
-Pending Removal in Future Versions ----------------------------------- - -The following APIs were deprecated in earlier Python versions and will be -removed, although there is currently no date scheduled for their removal. - -* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: no needed since Python 3.8. -* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException`. -* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException`. -* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException`. -* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject`. -* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child()`. -* :c:func:`PySlice_GetIndicesEx`. -* :c:func:`!PyUnicode_AsDecodedObject`. -* :c:func:`!PyUnicode_AsDecodedUnicode`. -* :c:func:`!PyUnicode_AsEncodedObject`. -* :c:func:`!PyUnicode_AsEncodedUnicode`. -* :c:func:`PyUnicode_READY`: not needed since Python 3.12. -* :c:func:`!_PyErr_ChainExceptions`. -* :c:member:`!PyBytesObject.ob_shash` member: - call :c:func:`PyObject_Hash` instead. -* :c:member:`!PyDictObject.ma_version_tag` member. -* TLS API: - - * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc`. - * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free`. - * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set`. - * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get`. - * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete`. - * :c:func:`PyThread_ReInitTLS`: no longer needed. +.. include:: ../deprecations/c-api-pending-removal-in-3.14.rst -* Remove undocumented ``PY_TIMEOUT_MAX`` constant from the limited C API. - (Contributed by Victor Stinner in :gh:`110014`.) +.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst +.. include:: ../deprecations/c-api-pending-removal-in-future.rst Regression Test Changes ======================= diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index 7450597e8597ad..aecc7cabd0d1f5 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -422,6 +422,12 @@ Deprecated :c:macro:`!isfinite` available from :file:`math.h` since C99. (Contributed by Sergey B Kirpichev in :gh:`119613`.) +.. Add deprecations above alphabetically, not here at the end. + +.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst + +.. include:: ../deprecations/c-api-pending-removal-in-future.rst + Removed ------- From 78df1043dbdce5c989600616f9f87b4ee72944e5 Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Mon, 29 Jul 2024 16:44:35 -0500 Subject: [PATCH 16/70] gh-122133: Authenticate socket connection for `socket.socketpair()` fallback (GH-122134) * Authenticate socket connection for `socket.socketpair()` fallback when the platform does not have a native `socketpair` C API. We authenticate in-process using `getsocketname` and `getpeername` (thanks to Nathaniel J Smith for that suggestion). Co-authored-by: Gregory P. Smith --- Lib/socket.py | 17 +++ Lib/test/test_socket.py | 128 +++++++++++++++++- ...-07-22-13-11-28.gh-issue-122133.0mPeta.rst | 5 + 3 files changed, 147 insertions(+), 3 deletions(-) create mode 100644 Misc/NEWS.d/next/Security/2024-07-22-13-11-28.gh-issue-122133.0mPeta.rst diff --git a/Lib/socket.py b/Lib/socket.py index 524ce1361b9091..2e6043cbdb8005 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -650,6 +650,23 @@ def socketpair(family=AF_INET, type=SOCK_STREAM, proto=0): raise finally: lsock.close() + + # Authenticating avoids using a connection from something else + # able to connect to {host}:{port} instead of us. 
+ # We expect only AF_INET and AF_INET6 families. + try: + if ( + ssock.getsockname() != csock.getpeername() + or csock.getsockname() != ssock.getpeername() + ): + raise ConnectionError("Unexpected peer connection") + except: + # getsockname() and getpeername() can fail + # if either socket isn't connected. + ssock.close() + csock.close() + raise + return (ssock, csock) __all__.append("socketpair") diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index ce0f64b43ed49f..bb65c3c5993b88 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -592,19 +592,27 @@ class SocketPairTest(unittest.TestCase, ThreadableTest): def __init__(self, methodName='runTest'): unittest.TestCase.__init__(self, methodName=methodName) ThreadableTest.__init__(self) + self.cli = None + self.serv = None + + def socketpair(self): + # To be overridden by some child classes. + return socket.socketpair() def setUp(self): - self.serv, self.cli = socket.socketpair() + self.serv, self.cli = self.socketpair() def tearDown(self): - self.serv.close() + if self.serv: + self.serv.close() self.serv = None def clientSetUp(self): pass def clientTearDown(self): - self.cli.close() + if self.cli: + self.cli.close() self.cli = None ThreadableTest.clientTearDown(self) @@ -4852,6 +4860,120 @@ def _testSend(self): self.assertEqual(msg, MSG) +class PurePythonSocketPairTest(SocketPairTest): + + # Explicitly use socketpair AF_INET or AF_INET6 to ensure that is the + # code path we're using regardless platform is the pure python one where + # `_socket.socketpair` does not exist. (AF_INET does not work with + # _socket.socketpair on many platforms). + def socketpair(self): + # called by super().setUp(). + try: + return socket.socketpair(socket.AF_INET6) + except OSError: + return socket.socketpair(socket.AF_INET) + + # Local imports in this class make for easy security fix backporting. + + def setUp(self): + import _socket + self._orig_sp = getattr(_socket, 'socketpair', None) + if self._orig_sp is not None: + # This forces the version using the non-OS provided socketpair + # emulation via an AF_INET socket in Lib/socket.py. + del _socket.socketpair + import importlib + global socket + socket = importlib.reload(socket) + else: + pass # This platform already uses the non-OS provided version. + super().setUp() + + def tearDown(self): + super().tearDown() + import _socket + if self._orig_sp is not None: + # Restore the default socket.socketpair definition. 
+ _socket.socketpair = self._orig_sp + import importlib + global socket + socket = importlib.reload(socket) + + def test_recv(self): + msg = self.serv.recv(1024) + self.assertEqual(msg, MSG) + + def _test_recv(self): + self.cli.send(MSG) + + def test_send(self): + self.serv.send(MSG) + + def _test_send(self): + msg = self.cli.recv(1024) + self.assertEqual(msg, MSG) + + def test_ipv4(self): + cli, srv = socket.socketpair(socket.AF_INET) + cli.close() + srv.close() + + def _test_ipv4(self): + pass + + @unittest.skipIf(not hasattr(_socket, 'IPPROTO_IPV6') or + not hasattr(_socket, 'IPV6_V6ONLY'), + "IPV6_V6ONLY option not supported") + @unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 required for this test') + def test_ipv6(self): + cli, srv = socket.socketpair(socket.AF_INET6) + cli.close() + srv.close() + + def _test_ipv6(self): + pass + + def test_injected_authentication_failure(self): + orig_getsockname = socket.socket.getsockname + inject_sock = None + + def inject_getsocketname(self): + nonlocal inject_sock + sockname = orig_getsockname(self) + # Connect to the listening socket ahead of the + # client socket. + if inject_sock is None: + inject_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + inject_sock.setblocking(False) + try: + inject_sock.connect(sockname[:2]) + except (BlockingIOError, InterruptedError): + pass + inject_sock.setblocking(True) + return sockname + + sock1 = sock2 = None + try: + socket.socket.getsockname = inject_getsocketname + with self.assertRaises(OSError): + sock1, sock2 = socket.socketpair() + finally: + socket.socket.getsockname = orig_getsockname + if inject_sock: + inject_sock.close() + if sock1: # This cleanup isn't needed on a successful test. + sock1.close() + if sock2: + sock2.close() + + def _test_injected_authentication_failure(self): + # No-op. Exists for base class threading infrastructure to call. + # We could refactor this test into its own lesser class along with the + # setUp and tearDown code to construct an ideal; it is simpler to keep + # it here and live with extra overhead one this _one_ failure test. + pass + + class NonBlockingTCPTests(ThreadedTCPSocketTest): def __init__(self, methodName='runTest'): diff --git a/Misc/NEWS.d/next/Security/2024-07-22-13-11-28.gh-issue-122133.0mPeta.rst b/Misc/NEWS.d/next/Security/2024-07-22-13-11-28.gh-issue-122133.0mPeta.rst new file mode 100644 index 00000000000000..3544eb3824d0da --- /dev/null +++ b/Misc/NEWS.d/next/Security/2024-07-22-13-11-28.gh-issue-122133.0mPeta.rst @@ -0,0 +1,5 @@ +Authenticate the socket connection for the ``socket.socketpair()`` fallback +on platforms where ``AF_UNIX`` is not available like Windows. + +Patch by Gregory P. Smith and Seth Larson . 
Reported by Ellie + From 7797182b78baf78f64fe16f436aa2279cf6afc23 Mon Sep 17 00:00:00 2001 From: Brandt Bucher Date: Mon, 29 Jul 2024 14:49:17 -0700 Subject: [PATCH 17/70] GH-118093: Improve handling of short and mid-loop traces (GH-122252) --- Objects/genobject.c | 5 ++-- Python/optimizer.c | 64 ++++++++++++++++++++++----------------------- 2 files changed, 34 insertions(+), 35 deletions(-) diff --git a/Objects/genobject.c b/Objects/genobject.c index c204ac04b480a4..b281af8d7e1f4e 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -342,8 +342,9 @@ _PyGen_yf(PyGenObject *gen) { if (gen->gi_frame_state == FRAME_SUSPENDED_YIELD_FROM) { _PyInterpreterFrame *frame = &gen->gi_iframe; - assert(is_resume(frame->instr_ptr)); - assert((frame->instr_ptr->op.arg & RESUME_OPARG_LOCATION_MASK) >= RESUME_AFTER_YIELD_FROM); + // GH-122390: These asserts are wrong in the presence of ENTER_EXECUTOR! + // assert(is_resume(frame->instr_ptr)); + // assert((frame->instr_ptr->op.arg & RESUME_OPARG_LOCATION_MASK) >= RESUME_AFTER_YIELD_FROM); return PyStackRef_AsPyObjectNew(_PyFrame_StackPeek(frame)); } return NULL; diff --git a/Python/optimizer.c b/Python/optimizer.c index 7b875af2aae898..ce8a36575cde1d 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -503,8 +503,7 @@ add_to_trace( if (trace_stack_depth >= TRACE_STACK_SIZE) { \ DPRINTF(2, "Trace stack overflow\n"); \ OPT_STAT_INC(trace_stack_overflow); \ - trace_length = 0; \ - goto done; \ + return 0; \ } \ assert(func == NULL || func->func_code == (PyObject *)code); \ trace_stack[trace_stack_depth].func = func; \ @@ -550,6 +549,7 @@ translate_bytecode_to_trace( } trace_stack[TRACE_STACK_SIZE]; int trace_stack_depth = 0; int confidence = CONFIDENCE_RANGE; // Adjusted by branch instructions + bool jump_seen = false; #ifdef Py_DEBUG char *python_lltrace = Py_GETENV("PYTHON_LLTRACE"); @@ -568,7 +568,6 @@ translate_bytecode_to_trace( ADD_TO_TRACE(_START_EXECUTOR, 0, (uintptr_t)instr, INSTR_IP(instr, code)); uint32_t target = 0; -top: // Jump here after _PUSH_FRAME or likely branches for (;;) { target = INSTR_IP(instr, code); // Need space for _DEOPT @@ -577,6 +576,13 @@ translate_bytecode_to_trace( uint32_t opcode = instr->op.code; uint32_t oparg = instr->op.arg; + if (!progress_needed && instr == initial_instr) { + // We have looped around to the start: + RESERVE(1); + ADD_TO_TRACE(_JUMP_TO_TOP, 0, 0, 0); + goto done; + } + DPRINTF(2, "%d: %s(%d)\n", target, _PyOpcode_OpName[opcode], oparg); if (opcode == ENTER_EXECUTOR) { @@ -603,30 +609,21 @@ translate_bytecode_to_trace( /* Special case the first instruction, * so that we can guarantee forward progress */ if (progress_needed) { - progress_needed = false; - if (opcode == JUMP_BACKWARD || opcode == JUMP_BACKWARD_NO_INTERRUPT) { - instr += 1 + _PyOpcode_Caches[opcode] - (int32_t)oparg; - initial_instr = instr; - if (opcode == JUMP_BACKWARD) { - ADD_TO_TRACE(_TIER2_RESUME_CHECK, 0, 0, target); - } - continue; - } - else { - if (OPCODE_HAS_EXIT(opcode) || OPCODE_HAS_DEOPT(opcode)) { - opcode = _PyOpcode_Deopt[opcode]; - } - assert(!OPCODE_HAS_EXIT(opcode)); - assert(!OPCODE_HAS_DEOPT(opcode)); + if (OPCODE_HAS_EXIT(opcode) || OPCODE_HAS_DEOPT(opcode)) { + opcode = _PyOpcode_Deopt[opcode]; } + assert(!OPCODE_HAS_EXIT(opcode)); + assert(!OPCODE_HAS_DEOPT(opcode)); } if (OPCODE_HAS_EXIT(opcode)) { - // Make space for exit code + // Make space for side exit and final _EXIT_TRACE: + RESERVE_RAW(2, "_EXIT_TRACE"); max_length--; } if (OPCODE_HAS_ERROR(opcode)) { - // Make space for error code + // Make 
space for error stub and final _EXIT_TRACE: + RESERVE_RAW(2, "_ERROR_POP_N"); max_length--; } switch (opcode) { @@ -672,19 +669,18 @@ translate_bytecode_to_trace( } case JUMP_BACKWARD: + ADD_TO_TRACE(_CHECK_PERIODIC, 0, 0, target); + _Py_FALLTHROUGH; case JUMP_BACKWARD_NO_INTERRUPT: { - _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[opcode] - (int)oparg; - if (target == initial_instr) { - /* We have looped round to the start */ - RESERVE(1); - ADD_TO_TRACE(_JUMP_TO_TOP, 0, 0, 0); - } - else { + instr += 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]] - (int)oparg; + if (jump_seen) { OPT_STAT_INC(inner_loop); DPRINTF(2, "JUMP_BACKWARD not to top ends trace\n"); + goto done; } - goto done; + jump_seen = true; + goto top; } case JUMP_FORWARD: @@ -904,6 +900,9 @@ translate_bytecode_to_trace( assert(instr->op.code == POP_TOP); instr++; } + top: + // Jump here after _PUSH_FRAME or likely branches. + progress_needed = false; } // End for (;;) done: @@ -911,16 +910,15 @@ translate_bytecode_to_trace( TRACE_STACK_POP(); } assert(code == initial_code); - // Skip short traces like _SET_IP, LOAD_FAST, _SET_IP, _EXIT_TRACE - if (progress_needed || trace_length < 5) { + // Skip short traces where we can't even translate a single instruction: + if (progress_needed) { OPT_STAT_INC(trace_too_short); DPRINTF(2, - "No trace for %s (%s:%d) at byte offset %d (%s)\n", + "No trace for %s (%s:%d) at byte offset %d (no progress)\n", PyUnicode_AsUTF8(code->co_qualname), PyUnicode_AsUTF8(code->co_filename), code->co_firstlineno, - 2 * INSTR_IP(initial_instr, code), - progress_needed ? "no progress" : "too short"); + 2 * INSTR_IP(initial_instr, code)); return 0; } if (trace[trace_length-1].opcode != _JUMP_TO_TOP) { From ac8da34621a574cd5773217404757a294025ba49 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Mon, 29 Jul 2024 18:15:03 -0400 Subject: [PATCH 18/70] gh-122420: Fix accounting for immortal interned strings in refleak.py (GH-122421) The `_PyUnicode_Intern*` functions already adjust the total refcount, so we don't want to readjust it in refleak.py. 
--- Lib/test/libregrtest/refleak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index 20b05954c762ff..fa447a4336a399 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -145,7 +145,7 @@ def get_pooled_int(value): # Use an internal-only keyword argument that mypy doesn't know yet _only_immortal=True) # type: ignore[call-arg] alloc_after = getallocatedblocks() - interned_immortal_after - rc_after = gettotalrefcount() - interned_immortal_after * 2 + rc_after = gettotalrefcount() fd_after = fd_count() rc_deltas[i] = get_pooled_int(rc_after - rc_before) From 11ad731f4fa096c8b5d71731f1182d893a88c9b4 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Tue, 30 Jul 2024 04:49:00 +0100 Subject: [PATCH 19/70] GH-121970: Extract ``audit_events`` into a new extension (#122325) --- Doc/conf.py | 1 + Doc/tools/extensions/audit_events.py | 262 +++++++++++++++++++++++++++ Doc/tools/extensions/pyspecific.py | 207 --------------------- 3 files changed, 263 insertions(+), 207 deletions(-) create mode 100644 Doc/tools/extensions/audit_events.py diff --git a/Doc/conf.py b/Doc/conf.py index 4841b69e380085..3860d146a27e85 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -20,6 +20,7 @@ # --------------------- extensions = [ + 'audit_events', 'c_annotations', 'glossary_search', 'lexers', diff --git a/Doc/tools/extensions/audit_events.py b/Doc/tools/extensions/audit_events.py new file mode 100644 index 00000000000000..d0f08522d21ea2 --- /dev/null +++ b/Doc/tools/extensions/audit_events.py @@ -0,0 +1,262 @@ +"""Support for documenting audit events.""" + +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +from docutils import nodes +from sphinx.errors import NoUri +from sphinx.locale import _ as sphinx_gettext +from sphinx.transforms.post_transforms import SphinxPostTransform +from sphinx.util import logging +from sphinx.util.docutils import SphinxDirective + +if TYPE_CHECKING: + from collections.abc import Iterator + + from sphinx.application import Sphinx + from sphinx.builders import Builder + from sphinx.environment import BuildEnvironment + +logger = logging.getLogger(__name__) + +# This list of sets are allowable synonyms for event argument names. +# If two names are in the same set, they are treated as equal for the +# purposes of warning. This won't help if the number of arguments is +# different! 
+_SYNONYMS = [ + frozenset({"file", "path", "fd"}), +] + + +class AuditEvents: + def __init__(self) -> None: + self.events: dict[str, list[str]] = {} + self.sources: dict[str, list[tuple[str, str]]] = {} + + def __iter__(self) -> Iterator[tuple[str, list[str], tuple[str, str]]]: + for name, args in self.events.items(): + for source in self.sources[name]: + yield name, args, source + + def add_event( + self, name, args: list[str], source: tuple[str, str] + ) -> None: + if name in self.events: + self._check_args_match(name, args) + else: + self.events[name] = args + self.sources.setdefault(name, []).append(source) + + def _check_args_match(self, name: str, args: list[str]) -> None: + current_args = self.events[name] + msg = ( + f"Mismatched arguments for audit-event {name}: " + f"{current_args!r} != {args!r}" + ) + if current_args == args: + return + if len(current_args) != len(args): + logger.warning(msg) + return + for a1, a2 in zip(current_args, args, strict=False): + if a1 == a2: + continue + if any(a1 in s and a2 in s for s in _SYNONYMS): + continue + logger.warning(msg) + return + + def id_for(self, name) -> str: + source_count = len(self.sources.get(name, ())) + name_clean = re.sub(r"\W", "_", name) + return f"audit_event_{name_clean}_{source_count}" + + def rows(self) -> Iterator[tuple[str, list[str], list[tuple[str, str]]]]: + for name in sorted(self.events.keys()): + yield name, self.events[name], self.sources[name] + + +def initialise_audit_events(app: Sphinx) -> None: + """Initialise the audit_events attribute on the environment.""" + if not hasattr(app.env, "audit_events"): + app.env.audit_events = AuditEvents() + + +def audit_events_purge( + app: Sphinx, env: BuildEnvironment, docname: str +) -> None: + """This is to remove traces of removed documents from env.audit_events.""" + fresh_audit_events = AuditEvents() + for name, args, (doc, target) in env.audit_events: + if doc != docname: + fresh_audit_events.add_event(name, args, (doc, target)) + + +def audit_events_merge( + app: Sphinx, + env: BuildEnvironment, + docnames: list[str], + other: BuildEnvironment, +) -> None: + """In Sphinx parallel builds, this merges audit_events from subprocesses.""" + for name, args, source in other.audit_events: + env.audit_events.add_event(name, args, source) + + +class AuditEvent(SphinxDirective): + has_content = True + required_arguments = 1 + optional_arguments = 2 + final_argument_whitespace = True + + _label = [ + sphinx_gettext( + "Raises an :ref:`auditing event ` " + "{name} with no arguments." + ), + sphinx_gettext( + "Raises an :ref:`auditing event ` " + "{name} with argument {args}." + ), + sphinx_gettext( + "Raises an :ref:`auditing event ` " + "{name} with arguments {args}." 
+ ), + ] + + def run(self) -> list[nodes.paragraph]: + name = self.arguments[0] + if len(self.arguments) >= 2 and self.arguments[1]: + args = [ + arg + for argument in self.arguments[1].strip("'\"").split(",") + if (arg := argument.strip()) + ] + else: + args = [] + ids = [] + try: + target = self.arguments[2].strip("\"'") + except (IndexError, TypeError): + target = None + if not target: + target = self.env.audit_events.id_for(name) + ids.append(target) + self.env.audit_events.add_event(name, args, (self.env.docname, target)) + + node = nodes.paragraph("", classes=["audit-hook"], ids=ids) + self.set_source_info(node) + if self.content: + self.state.nested_parse(self.content, self.content_offset, node) + else: + num_args = min(2, len(args)) + text = self._label[num_args].format( + name=f"``{name}``", + args=", ".join(f"``{a}``" for a in args), + ) + parsed, messages = self.state.inline_text(text, self.lineno) + node += parsed + node += messages + return [node] + + +class audit_event_list(nodes.General, nodes.Element): # noqa: N801 + pass + + +class AuditEventListDirective(SphinxDirective): + def run(self) -> list[audit_event_list]: + return [audit_event_list()] + + +class AuditEventListTransform(SphinxPostTransform): + default_priority = 500 + + def run(self) -> None: + if self.document.next_node(audit_event_list) is None: + return + + table = self._make_table(self.app.builder, self.env.docname) + for node in self.document.findall(audit_event_list): + node.replace_self(table) + + def _make_table(self, builder: Builder, docname: str) -> nodes.table: + table = nodes.table(cols=3) + group = nodes.tgroup( + "", + nodes.colspec(colwidth=30), + nodes.colspec(colwidth=55), + nodes.colspec(colwidth=15), + cols=3, + ) + head = nodes.thead() + body = nodes.tbody() + + table += group + group += head + group += body + + head += nodes.row( + "", + nodes.entry("", nodes.paragraph("", "Audit event")), + nodes.entry("", nodes.paragraph("", "Arguments")), + nodes.entry("", nodes.paragraph("", "References")), + ) + + for name, args, sources in builder.env.audit_events.rows(): + body += self._make_row(builder, docname, name, args, sources) + + return table + + @staticmethod + def _make_row( + builder: Builder, + docname: str, + name: str, + args: list[str], + sources: list[tuple[str, str]], + ) -> nodes.row: + row = nodes.row() + name_node = nodes.paragraph("", nodes.Text(name)) + row += nodes.entry("", name_node) + + args_node = nodes.paragraph() + for arg in args: + args_node += nodes.literal(arg, arg) + args_node += nodes.Text(", ") + if len(args_node.children) > 0: + args_node.children.pop() # remove trailing comma + row += nodes.entry("", args_node) + + backlinks_node = nodes.paragraph() + backlinks = enumerate(sorted(set(sources)), start=1) + for i, (doc, label) in backlinks: + if isinstance(label, str): + ref = nodes.reference("", f"[{i}]", internal=True) + try: + target = ( + f"{builder.get_relative_uri(docname, doc)}#{label}" + ) + except NoUri: + continue + else: + ref["refuri"] = target + backlinks_node += ref + row += nodes.entry("", backlinks_node) + return row + + +def setup(app: Sphinx): + app.add_directive("audit-event", AuditEvent) + app.add_directive("audit-event-table", AuditEventListDirective) + app.add_post_transform(AuditEventListTransform) + app.connect("builder-inited", initialise_audit_events) + app.connect("env-purge-doc", audit_events_purge) + app.connect("env-merge-info", audit_events_merge) + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": 
True, + } diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index f5be19a8d49cc9..791d9296a975e7 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -15,7 +15,6 @@ from time import asctime from pprint import pformat -import sphinx from docutils import nodes from docutils.io import StringOutput from docutils.parsers.rst import directives @@ -24,7 +23,6 @@ from sphinx.builders import Builder from sphinx.domains.changeset import VersionChange, versionlabels, versionlabel_classes from sphinx.domains.python import PyFunction, PyMethod, PyModule -from sphinx.errors import NoUri from sphinx.locale import _ as sphinx_gettext from sphinx.util import logging from sphinx.util.docutils import SphinxDirective @@ -184,142 +182,6 @@ def parse_platforms(self): return platforms -# Support for documenting audit event - -def audit_events_purge(app, env, docname): - """This is to remove from env.all_audit_events old traces of removed - documents. - """ - if not hasattr(env, 'all_audit_events'): - return - fresh_all_audit_events = {} - for name, event in env.all_audit_events.items(): - event["source"] = [(d, t) for d, t in event["source"] if d != docname] - if event["source"]: - # Only keep audit_events that have at least one source. - fresh_all_audit_events[name] = event - env.all_audit_events = fresh_all_audit_events - - -def audit_events_merge(app, env, docnames, other): - """In Sphinx parallel builds, this merges env.all_audit_events from - subprocesses. - - all_audit_events is a dict of names, with values like: - {'source': [(docname, target), ...], 'args': args} - """ - if not hasattr(other, 'all_audit_events'): - return - if not hasattr(env, 'all_audit_events'): - env.all_audit_events = {} - for name, value in other.all_audit_events.items(): - if name in env.all_audit_events: - env.all_audit_events[name]["source"].extend(value["source"]) - else: - env.all_audit_events[name] = value - - -class AuditEvent(SphinxDirective): - - has_content = True - required_arguments = 1 - optional_arguments = 2 - final_argument_whitespace = True - - _label = [ - sphinx_gettext("Raises an :ref:`auditing event ` {name} with no arguments."), - sphinx_gettext("Raises an :ref:`auditing event ` {name} with argument {args}."), - sphinx_gettext("Raises an :ref:`auditing event ` {name} with arguments {args}."), - ] - - @property - def logger(self): - cls = type(self) - return logging.getLogger(cls.__module__ + "." 
+ cls.__name__) - - def run(self): - name = self.arguments[0] - if len(self.arguments) >= 2 and self.arguments[1]: - args = (a.strip() for a in self.arguments[1].strip("'\"").split(",")) - args = [a for a in args if a] - else: - args = [] - - label = self._label[min(2, len(args))] - text = label.format(name="``{}``".format(name), - args=", ".join("``{}``".format(a) for a in args if a)) - - if not hasattr(self.env, 'all_audit_events'): - self.env.all_audit_events = {} - - new_info = { - 'source': [], - 'args': args - } - info = self.env.all_audit_events.setdefault(name, new_info) - if info is not new_info: - if not self._do_args_match(info['args'], new_info['args']): - self.logger.warning( - "Mismatched arguments for audit-event {}: {!r} != {!r}" - .format(name, info['args'], new_info['args']) - ) - - ids = [] - try: - target = self.arguments[2].strip("\"'") - except (IndexError, TypeError): - target = None - if not target: - target = "audit_event_{}_{}".format( - re.sub(r'\W', '_', name), - len(info['source']), - ) - ids.append(target) - - info['source'].append((self.env.docname, target)) - - pnode = nodes.paragraph(text, classes=["audit-hook"], ids=ids) - pnode.line = self.lineno - if self.content: - self.state.nested_parse(self.content, self.content_offset, pnode) - else: - n, m = self.state.inline_text(text, self.lineno) - pnode.extend(n + m) - - return [pnode] - - # This list of sets are allowable synonyms for event argument names. - # If two names are in the same set, they are treated as equal for the - # purposes of warning. This won't help if number of arguments is - # different! - _SYNONYMS = [ - {"file", "path", "fd"}, - ] - - def _do_args_match(self, args1, args2): - if args1 == args2: - return True - if len(args1) != len(args2): - return False - for a1, a2 in zip(args1, args2): - if a1 == a2: - continue - if any(a1 in s and a2 in s for s in self._SYNONYMS): - continue - return False - return True - - -class audit_event_list(nodes.General, nodes.Element): - pass - - -class AuditEventListDirective(SphinxDirective): - - def run(self): - return [audit_event_list('')] - - # Support for documenting decorators class PyDecoratorMixin(object): @@ -583,70 +445,6 @@ def parse_monitoring_event(env, sig, signode): return sig -def process_audit_events(app, doctree, fromdocname): - for node in doctree.findall(audit_event_list): - break - else: - return - - env = app.builder.env - - table = nodes.table(cols=3) - group = nodes.tgroup( - '', - nodes.colspec(colwidth=30), - nodes.colspec(colwidth=55), - nodes.colspec(colwidth=15), - cols=3, - ) - head = nodes.thead() - body = nodes.tbody() - - table += group - group += head - group += body - - row = nodes.row() - row += nodes.entry('', nodes.paragraph('', nodes.Text('Audit event'))) - row += nodes.entry('', nodes.paragraph('', nodes.Text('Arguments'))) - row += nodes.entry('', nodes.paragraph('', nodes.Text('References'))) - head += row - - for name in sorted(getattr(env, "all_audit_events", ())): - audit_event = env.all_audit_events[name] - - row = nodes.row() - node = nodes.paragraph('', nodes.Text(name)) - row += nodes.entry('', node) - - node = nodes.paragraph() - for i, a in enumerate(audit_event['args']): - if i: - node += nodes.Text(", ") - node += nodes.literal(a, nodes.Text(a)) - row += nodes.entry('', node) - - node = nodes.paragraph() - backlinks = enumerate(sorted(set(audit_event['source'])), start=1) - for i, (doc, label) in backlinks: - if isinstance(label, str): - ref = nodes.reference("", nodes.Text("[{}]".format(i)), 
internal=True) - try: - ref['refuri'] = "{}#{}".format( - app.builder.get_relative_uri(fromdocname, doc), - label, - ) - except NoUri: - continue - node += ref - row += nodes.entry('', node) - - body += row - - for node in doctree.findall(audit_event_list): - node.replace_self(table) - - def patch_pairindextypes(app, _env) -> None: """Remove all entries from ``pairindextypes`` before writing POT files. @@ -676,8 +474,6 @@ def setup(app): app.add_role('gh', gh_issue_role) app.add_directive('impl-detail', ImplementationDetail) app.add_directive('availability', Availability) - app.add_directive('audit-event', AuditEvent) - app.add_directive('audit-event-table', AuditEventListDirective) app.add_directive('deprecated-removed', DeprecatedRemoved) app.add_builder(PydocTopicsBuilder) app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature) @@ -692,7 +488,4 @@ def setup(app): app.add_directive_to_domain('py', 'abstractmethod', PyAbstractMethod) app.add_directive('miscnews', MiscNews) app.connect('env-check-consistency', patch_pairindextypes) - app.connect('doctree-resolved', process_audit_events) - app.connect('env-merge-info', audit_events_merge) - app.connect('env-purge-doc', audit_events_purge) return {'version': '1.0', 'parallel_read_safe': True} From 3833d27f985a62c4709dcd9dc73724fc19d46ebf Mon Sep 17 00:00:00 2001 From: Petr Viktorin Date: Tue, 30 Jul 2024 09:37:58 +0200 Subject: [PATCH 20/70] gh-105733: Soft-deprecate ctypes.ARRAY, rather than hard-deprecating it. (GH-122281) Soft-deprecate ctypes.ARRAY, rather than hard-deprecating it. Partially reverts 2211454fe210637ed7fabda12690dac6cc9a8149 --- Doc/library/ctypes.rst | 9 +++++++++ Doc/whatsnew/3.13.rst | 4 ++-- Lib/ctypes/__init__.py | 2 -- Lib/test/test_ctypes/test_arrays.py | 14 +------------- .../2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst | 2 ++ 5 files changed, 14 insertions(+), 17 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 9e69b3dc51a1ac..c2f928e16aa90c 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -2688,6 +2688,15 @@ Arrays and pointers Array subclass constructors accept positional arguments, used to initialize the elements in order. +.. function:: ARRAY(type, length) + + Create an array. + Equivalent to ``type * length``, where *type* is a + :mod:`ctypes` data type and *length* an integer. + + This function is :term:`soft deprecated` in favor of multiplication. + There are no plans to remove it. + .. class:: _Pointer diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 0854631c832ef4..fbf19d1c9598e1 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -1494,8 +1494,8 @@ New Deprecations (Contributed by Hugo van Kemenade in :gh:`80480`.) * :mod:`ctypes`: Deprecate undocumented :func:`!ctypes.SetPointerType` - and :func:`!ctypes.ARRAY` functions. - Replace ``ctypes.ARRAY(item_type, size)`` with ``item_type * size``. + function. :term:`Soft-deprecate ` the :func:`ctypes.ARRAY` + function in favor of multiplication. (Contributed by Victor Stinner in :gh:`105733`.) 
* :mod:`decimal`: Deprecate non-standard format specifier "N" for diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index 721522caeeac92..cb3a61287bfe5d 100644 --- a/Lib/ctypes/__init__.py +++ b/Lib/ctypes/__init__.py @@ -324,8 +324,6 @@ def SetPointerType(pointer, cls): del _pointer_type_cache[id(pointer)] def ARRAY(typ, len): - import warnings - warnings._deprecated("ctypes.ARRAY", remove=(3, 15)) return typ * len ################################################################ diff --git a/Lib/test/test_ctypes/test_arrays.py b/Lib/test/test_ctypes/test_arrays.py index 6846773d7069ae..c80fdff5de685d 100644 --- a/Lib/test/test_ctypes/test_arrays.py +++ b/Lib/test/test_ctypes/test_arrays.py @@ -1,8 +1,7 @@ import ctypes import sys import unittest -import warnings -from ctypes import (Structure, Array, sizeof, addressof, +from ctypes import (Structure, Array, ARRAY, sizeof, addressof, create_string_buffer, create_unicode_buffer, c_char, c_wchar, c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, c_long, c_ulonglong, c_float, c_double, c_longdouble) @@ -17,13 +16,6 @@ c_long, c_ulonglong, c_float, c_double, c_longdouble -def ARRAY(*args): - # ignore DeprecationWarning in tests - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - return ctypes.ARRAY(*args) - - class ArrayTestCase(unittest.TestCase): def test_inheritance_hierarchy(self): self.assertEqual(Array.mro(), [Array, _CData, object]) @@ -275,10 +267,6 @@ def test_bpo36504_signed_int_overflow(self): def test_large_array(self, size): c_char * size - def test_deprecation(self): - with self.assertWarns(DeprecationWarning): - CharArray = ctypes.ARRAY(c_char, 3) - if __name__ == '__main__': unittest.main() diff --git a/Misc/NEWS.d/next/Library/2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst b/Misc/NEWS.d/next/Library/2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst new file mode 100644 index 00000000000000..60c5e69d2f6f9c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst @@ -0,0 +1,2 @@ +:func:`ctypes.ARRAY` is now :term:`soft deprecated`: it no longer emits deprecation +warnings and is not scheduled for removal. From 3a9b2aae615165a40614db9aaa8b90c55ff0c7f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9n=C3=A9dikt=20Tran?= <10796600+picnixz@users.noreply.github.com> Date: Tue, 30 Jul 2024 10:50:30 +0200 Subject: [PATCH 21/70] gh-122400: Handle ValueError in filecmp (GH-122401) --- Lib/filecmp.py | 10 +++--- Lib/test/test_filecmp.py | 33 +++++++++++++++++++ ...-07-29-16-47-08.gh-issue-122400.fM0YSv.rst | 3 ++ 3 files changed, 42 insertions(+), 4 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2024-07-29-16-47-08.gh-issue-122400.fM0YSv.rst diff --git a/Lib/filecmp.py b/Lib/filecmp.py index 020ea694ca63e9..c5b8d854d77de3 100644 --- a/Lib/filecmp.py +++ b/Lib/filecmp.py @@ -164,12 +164,14 @@ def phase2(self): # Distinguish files, directories, funnies ok = True try: a_stat = os.stat(a_path) - except OSError: + except (OSError, ValueError): + # See https://github.com/python/cpython/issues/122400 + # for the rationale for protecting against ValueError. # print('Can\'t stat', a_path, ':', why.args[1]) ok = False try: b_stat = os.stat(b_path) - except OSError: + except (OSError, ValueError): # print('Can\'t stat', b_path, ':', why.args[1]) ok = False @@ -285,12 +287,12 @@ def cmpfiles(a, b, common, shallow=True): # Return: # 0 for equal # 1 for different -# 2 for funny cases (can't stat, etc.) 
+# 2 for funny cases (can't stat, NUL bytes, etc.) # def _cmp(a, b, sh, abs=abs, cmp=cmp): try: return not abs(cmp(a, b, sh)) - except OSError: + except (OSError, ValueError): return 2 diff --git a/Lib/test/test_filecmp.py b/Lib/test/test_filecmp.py index 1fb47163719ede..2c83667b22feb4 100644 --- a/Lib/test/test_filecmp.py +++ b/Lib/test/test_filecmp.py @@ -156,6 +156,39 @@ def test_cmpfiles(self): (['file'], ['file2'], []), "Comparing mismatched directories fails") + def test_cmpfiles_invalid_names(self): + # See https://github.com/python/cpython/issues/122400. + for file, desc in [ + ('\x00', 'NUL bytes filename'), + (__file__ + '\x00', 'filename with embedded NUL bytes'), + ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), + ('a' * 1_000_000, 'very long filename'), + ]: + for other_dir in [self.dir, self.dir_same, self.dir_diff]: + with self.subTest(f'cmpfiles: {desc}', other_dir=other_dir): + res = filecmp.cmpfiles(self.dir, other_dir, [file]) + self.assertTupleEqual(res, ([], [], [file])) + + def test_dircmp_invalid_names(self): + for bad_dir, desc in [ + ('\x00', 'NUL bytes dirname'), + (f'Top{os.sep}Mid\x00', 'dirname with embedded NUL bytes'), + ("\uD834\uDD1E", 'surrogate codes (MUSICAL SYMBOL G CLEF)'), + ('a' * 1_000_000, 'very long dirname'), + ]: + d1 = filecmp.dircmp(self.dir, bad_dir) + d2 = filecmp.dircmp(bad_dir, self.dir) + for target in [ + # attributes where os.listdir() raises OSError or ValueError + 'left_list', 'right_list', + 'left_only', 'right_only', 'common', + ]: + with self.subTest(f'dircmp(ok, bad): {desc}', target=target): + with self.assertRaises((OSError, ValueError)): + getattr(d1, target) + with self.subTest(f'dircmp(bad, ok): {desc}', target=target): + with self.assertRaises((OSError, ValueError)): + getattr(d2, target) def _assert_lists(self, actual, expected): """Assert that two lists are equal, up to ordering.""" diff --git a/Misc/NEWS.d/next/Library/2024-07-29-16-47-08.gh-issue-122400.fM0YSv.rst b/Misc/NEWS.d/next/Library/2024-07-29-16-47-08.gh-issue-122400.fM0YSv.rst new file mode 100644 index 00000000000000..8c47e94f78d9f0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-29-16-47-08.gh-issue-122400.fM0YSv.rst @@ -0,0 +1,3 @@ +Handle :exc:`ValueError`\s raised by :func:`os.stat` in +:class:`filecmp.dircmp` and :func:`filecmp.cmpfiles`. +Patch by Bénédikt Tran. From d27a53fc02a87e76066fc4e15ff1fff3922a482d Mon Sep 17 00:00:00 2001 From: Clinton Date: Tue, 30 Jul 2024 04:53:07 -0400 Subject: [PATCH 22/70] gh-121474: Add threading.Barrier parties arg sanity check. 
(GH-121480) --- Lib/test/lock_tests.py | 4 ++++ Lib/threading.py | 2 ++ .../Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst | 2 ++ 3 files changed, 8 insertions(+) create mode 100644 Misc/NEWS.d/next/Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py index 024c6debcd4a54..8c8f8901f00178 100644 --- a/Lib/test/lock_tests.py +++ b/Lib/test/lock_tests.py @@ -1013,6 +1013,10 @@ def multipass(self, results, n): self.assertEqual(self.barrier.n_waiting, 0) self.assertFalse(self.barrier.broken) + def test_constructor(self): + self.assertRaises(ValueError, self.barriertype, parties=0) + self.assertRaises(ValueError, self.barriertype, parties=-1) + def test_barrier(self, passes=1): """ Test that a barrier is passed in lockstep diff --git a/Lib/threading.py b/Lib/threading.py index 2dcdd0c9e067b6..94ea2f08178369 100644 --- a/Lib/threading.py +++ b/Lib/threading.py @@ -689,6 +689,8 @@ def __init__(self, parties, action=None, timeout=None): default for all subsequent 'wait()' calls. """ + if parties < 1: + raise ValueError("parties must be > 0") self._cond = Condition(Lock()) self._action = action self._timeout = timeout diff --git a/Misc/NEWS.d/next/Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst b/Misc/NEWS.d/next/Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst new file mode 100644 index 00000000000000..605f30d76f5d47 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst @@ -0,0 +1,2 @@ +Fix missing sanity check for ``parties`` arg in :class:`threading.Barrier` +constructor. Patch by Clinton Christian (pygeek). From d1a1bca1f0550a4715f1bf32b1586caa7bc4487b Mon Sep 17 00:00:00 2001 From: Dino Viehland Date: Tue, 30 Jul 2024 05:03:52 -0700 Subject: [PATCH 23/70] gh-119896: Fix CTRL-Z behavior in the new REPL on Windows (GH-122217) --- Lib/_pyrepl/reader.py | 9 +++++++-- Lib/_pyrepl/simple_interact.py | 1 + Lib/_pyrepl/utils.py | 3 ++- Lib/_pyrepl/windows_console.py | 5 ++++- 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/Lib/_pyrepl/reader.py b/Lib/_pyrepl/reader.py index 8b282a382d374f..13b1f3eb9d118a 100644 --- a/Lib/_pyrepl/reader.py +++ b/Lib/_pyrepl/reader.py @@ -21,6 +21,8 @@ from __future__ import annotations +import sys + from contextlib import contextmanager from dataclasses import dataclass, field, fields import unicodedata @@ -52,7 +54,10 @@ def disp_str(buffer: str) -> tuple[str, list[int]]: b: list[int] = [] s: list[str] = [] for c in buffer: - if ord(c) < 128: + if c == '\x1a': + s.append(c) + b.append(2) + elif ord(c) < 128: s.append(c) b.append(1) elif unicodedata.category(c).startswith("C"): @@ -110,7 +115,7 @@ def make_default_commands() -> dict[CommandName, type[Command]]: (r"\C-w", "unix-word-rubout"), (r"\C-x\C-u", "upcase-region"), (r"\C-y", "yank"), - (r"\C-z", "suspend"), + *(() if sys.platform == "win32" else ((r"\C-z", "suspend"), )), (r"\M-b", "backward-word"), (r"\M-c", "capitalize-word"), (r"\M-d", "kill-word"), diff --git a/Lib/_pyrepl/simple_interact.py b/Lib/_pyrepl/simple_interact.py index 2c3dffe070c629..91aef5e01eb867 100644 --- a/Lib/_pyrepl/simple_interact.py +++ b/Lib/_pyrepl/simple_interact.py @@ -76,6 +76,7 @@ def _clear_screen(): "copyright": _sitebuiltins._Printer('copyright', sys.copyright), "help": "help", "clear": _clear_screen, + "\x1a": _sitebuiltins.Quitter('\x1a', ''), } diff --git a/Lib/_pyrepl/utils.py b/Lib/_pyrepl/utils.py index 20dbb1f7e17229..0f36083b6ffa92 100644 --- a/Lib/_pyrepl/utils.py +++ 
b/Lib/_pyrepl/utils.py @@ -21,4 +21,5 @@ def wlen(s: str) -> int: length = sum(str_width(i) for i in s) # remove lengths of any escape sequences sequence = ANSI_ESCAPE_SEQUENCE.findall(s) - return length - sum(len(i) for i in sequence) + ctrl_z_cnt = s.count('\x1a') + return length - sum(len(i) for i in sequence) + ctrl_z_cnt diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py index 9e97b1524e29a0..ba9af36b8be99c 100644 --- a/Lib/_pyrepl/windows_console.py +++ b/Lib/_pyrepl/windows_console.py @@ -253,7 +253,7 @@ def __write_changed_line( else: self.__posxy = wlen(newline), y - if "\x1b" in newline or y != self.__posxy[1]: + if "\x1b" in newline or y != self.__posxy[1] or '\x1a' in newline: # ANSI escape characters are present, so we can't assume # anything about the position of the cursor. Moving the cursor # to the left margin should work to get to a known position. @@ -291,6 +291,9 @@ def _disable_blinking(self): self.__write("\x1b[?12l") def __write(self, text: str) -> None: + if "\x1a" in text: + text = ''.join(["^Z" if x == '\x1a' else x for x in text]) + if self.out is not None: self.out.write(text.encode(self.encoding, "replace")) self.out.flush() From 8fb88b22b7a932ff16002dd19e904f9cafd59e9f Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 30 Jul 2024 11:30:52 -0400 Subject: [PATCH 24/70] gh-121946: Temporarily switch to llvm-17 in TSan CI again (GH-122466) The Ubuntu package for llvm-18 is broken --- .github/workflows/reusable-tsan.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml index b6d5d8fa1c7157..27f4eacd86fd95 100644 --- a/.github/workflows/reusable-tsan.yml +++ b/.github/workflows/reusable-tsan.yml @@ -36,11 +36,11 @@ jobs: # Install clang-18 wget https://apt.llvm.org/llvm.sh chmod +x llvm.sh - sudo ./llvm.sh 18 - sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-18 100 - sudo update-alternatives --set clang /usr/bin/clang-18 - sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-18 100 - sudo update-alternatives --set clang++ /usr/bin/clang++-18 + sudo ./llvm.sh 17 # gh-121946: llvm-18 package is temporarily broken + sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-17 100 + sudo update-alternatives --set clang /usr/bin/clang-17 + sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-17 100 + sudo update-alternatives --set clang++ /usr/bin/clang++-17 # Reduce ASLR to avoid TSAN crashing sudo sysctl -w vm.mmap_rnd_bits=28 - name: TSAN Option Setup From 1d8e45390733d3eb29164799ea10f8406f53e830 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Langa?= Date: Tue, 30 Jul 2024 18:57:19 +0200 Subject: [PATCH 25/70] gh-116402: Avoid readline in test_builtin TTY input tests (GH-122447) --- Lib/test/test_builtin.py | 39 ++++++++++++++++++++++++--------------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 85f139db9bcd45..2ea97e797a4892 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -4,6 +4,7 @@ import asyncio import builtins import collections +import contextlib import decimal import fractions import gc @@ -31,6 +32,7 @@ from operator import neg from test import support from test.support import (cpython_only, swap_attr, maybe_get_event_loop_policy) +from test.support.import_helper import import_module from test.support.os_helper import (EnvironmentVarGuard, 
TESTFN, unlink) from test.support.script_helper import assert_python_ok from test.support.warnings_helper import check_warnings @@ -2412,7 +2414,8 @@ def child(wpipe): print(ascii(input(prompt)), file=wpipe) except BaseException as e: print(ascii(f'{e.__class__.__name__}: {e!s}'), file=wpipe) - lines = self.run_child(child, terminal_input + b"\r\n") + with self.detach_readline(): + lines = self.run_child(child, terminal_input + b"\r\n") # Check we did exercise the GNU readline path self.assertIn(lines[0], {'tty = True', 'tty = False'}) if lines[0] != 'tty = True': @@ -2425,28 +2428,36 @@ def child(wpipe): expected = terminal_input.decode(sys.stdin.encoding) # what else? self.assertEqual(input_result, expected) - def test_input_tty(self): - # Test input() functionality when wired to a tty (the code path - # is different and invokes GNU readline if available). - self.check_input_tty("prompt", b"quux") - - def skip_if_readline(self): + @contextlib.contextmanager + def detach_readline(self): # bpo-13886: When the readline module is loaded, PyOS_Readline() uses # the readline implementation. In some cases, the Python readline # callback rlhandler() is called by readline with a string without - # non-ASCII characters. Skip tests on non-ASCII characters if the - # readline module is loaded, since test_builtin is not intended to test + # non-ASCII characters. + # Unlink readline temporarily from PyOS_Readline() for those tests, + # since test_builtin is not intended to test # the readline module, but the builtins module. - if 'readline' in sys.modules: - self.skipTest("the readline module is loaded") + if "readline" in sys.modules: + c = import_module("ctypes") + fp_api = "PyOS_ReadlineFunctionPointer" + prev_value = c.c_void_p.in_dll(c.pythonapi, fp_api).value + c.c_void_p.in_dll(c.pythonapi, fp_api).value = None + try: + yield + finally: + c.c_void_p.in_dll(c.pythonapi, fp_api).value = prev_value + else: + yield + + def test_input_tty(self): + # Test input() functionality when wired to a tty + self.check_input_tty("prompt", b"quux") def test_input_tty_non_ascii(self): - self.skip_if_readline() # Check stdin/stdout encoding is used when invoking PyOS_Readline() self.check_input_tty("prompté", b"quux\xc3\xa9", "utf-8") def test_input_tty_non_ascii_unicode_errors(self): - self.skip_if_readline() # Check stdin/stdout error handler is used when invoking PyOS_Readline() self.check_input_tty("prompté", b"quux\xe9", "ascii") @@ -2456,14 +2467,12 @@ def test_input_tty_null_in_prompt(self): 'null characters') def test_input_tty_nonencodable_prompt(self): - self.skip_if_readline() self.check_input_tty("prompté", b"quux", "ascii", stdout_errors='strict', expected="UnicodeEncodeError: 'ascii' codec can't encode " "character '\\xe9' in position 6: ordinal not in " "range(128)") def test_input_tty_nondecodable_input(self): - self.skip_if_readline() self.check_input_tty("prompt", b"quux\xe9", "ascii", stdin_errors='strict', expected="UnicodeDecodeError: 'ascii' codec can't decode " "byte 0xe9 in position 4: ordinal not in " From 2b163aa9e796b312bb0549d49145d26e4904768e Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 30 Jul 2024 13:53:47 -0400 Subject: [PATCH 26/70] gh-117657: Avoid race in `PAUSE_ADAPTIVE_COUNTER` in free-threaded build (#122190) The adaptive counter doesn't do anything currently in the free-threaded build and TSan reports a data race due to concurrent modifications to the counter. 
--- Python/ceval_macros.h | 3 ++- Tools/tsan/suppressions_free_threading.txt | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index 60efe3d78ff22c..2881ed2153a7c1 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -316,17 +316,18 @@ GETITEM(PyObject *v, Py_ssize_t i) { /* gh-115999 tracks progress on addressing this. */ \ static_assert(0, "The specializing interpreter is not yet thread-safe"); \ } while (0); +#define PAUSE_ADAPTIVE_COUNTER(COUNTER) ((void)COUNTER) #else #define ADVANCE_ADAPTIVE_COUNTER(COUNTER) \ do { \ (COUNTER) = advance_backoff_counter((COUNTER)); \ } while (0); -#endif #define PAUSE_ADAPTIVE_COUNTER(COUNTER) \ do { \ (COUNTER) = pause_backoff_counter((COUNTER)); \ } while (0); +#endif #define UNBOUNDLOCAL_ERROR_MSG \ "cannot access local variable '%s' where it is not associated with a value" diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt index a54e66d1212d1f..78449aed4009d3 100644 --- a/Tools/tsan/suppressions_free_threading.txt +++ b/Tools/tsan/suppressions_free_threading.txt @@ -23,7 +23,6 @@ race:free_threadstate # These warnings trigger directly in a CPython function. -race_top:_PyEval_EvalFrameDefault race_top:assign_version_tag race_top:new_reference race_top:_multiprocessing_SemLock_acquire_impl From af0a00f022d0fb8f1edb4abdda1bc6b915f0448d Mon Sep 17 00:00:00 2001 From: Michael Droettboom Date: Tue, 30 Jul 2024 15:31:05 -0400 Subject: [PATCH 27/70] gh-122188: Move magic number to its own file (#122243) * gh-122188: Move magic number to its own file * Add versionadded directive * Do work in C * Integrate launcher.c * Make _pyc_magic_number private * Remove metadata * Move sys.implementation -> _imp * Modernize comment * Move _RAW_MAGIC_NUMBER to the C side as well * _pyc_magic_number -> pyc_magic_number * Remove unused import * Update docs * Apply suggestions from code review Co-authored-by: Eric Snow * Fix typo in tests --------- Co-authored-by: Eric Snow --- Include/internal/pycore_magic_number.h | 280 +++++++++++++++++++++++++ InternalDocs/compiler.md | 2 +- Lib/importlib/_bootstrap_external.py | 279 +----------------------- Lib/importlib/util.py | 3 +- Lib/test/test_import/__init__.py | 9 + Lib/zipimport.py | 2 +- Python/import.c | 27 ++- 7 files changed, 307 insertions(+), 295 deletions(-) create mode 100644 Include/internal/pycore_magic_number.h diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h new file mode 100644 index 00000000000000..3af6817e9fde85 --- /dev/null +++ b/Include/internal/pycore_magic_number.h @@ -0,0 +1,280 @@ +#ifndef Py_INTERNAL_MAGIC_NUMBER_H +#define Py_INTERNAL_MAGIC_NUMBER_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +/* + +Magic number to reject .pyc files generated by other Python versions. +It should change for each incompatible change to the bytecode. + +PYC_MAGIC_NUMBER must change whenever the bytecode emitted by the compiler may +no longer be understood by older implementations of the eval loop (usually due +to the addition of new opcodes). + +The value of CR and LF is incorporated so if you ever read or write +a .pyc file in text mode the magic number will be wrong; also, the +Apple MPW compiler swaps their values, botching string constants. + +There were a variety of old schemes for setting the magic number. 
Starting with +Python 3.11, Python 3.n starts with magic number 2900+50n. Within each minor +version, the magic number is incremented by 1 each time the file format changes. + +Known values: + Python 1.5: 20121 + Python 1.5.1: 20121 + Python 1.5.2: 20121 + Python 1.6: 50428 + Python 2.0: 50823 + Python 2.0.1: 50823 + Python 2.1: 60202 + Python 2.1.1: 60202 + Python 2.1.2: 60202 + Python 2.2: 60717 + Python 2.3a0: 62011 + Python 2.3a0: 62021 + Python 2.3a0: 62011 (!) + Python 2.4a0: 62041 + Python 2.4a3: 62051 + Python 2.4b1: 62061 + Python 2.5a0: 62071 + Python 2.5a0: 62081 (ast-branch) + Python 2.5a0: 62091 (with) + Python 2.5a0: 62092 (changed WITH_CLEANUP opcode) + Python 2.5b3: 62101 (fix wrong code: for x, in ...) + Python 2.5b3: 62111 (fix wrong code: x += yield) + Python 2.5c1: 62121 (fix wrong lnotab with for loops and + storing constants that should have been removed) + Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp) + Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode) + Python 2.6a1: 62161 (WITH_CLEANUP optimization) + Python 2.7a0: 62171 (optimize list comprehensions/change LIST_APPEND) + Python 2.7a0: 62181 (optimize conditional branches: + introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE) + Python 2.7a0 62191 (introduce SETUP_WITH) + Python 2.7a0 62201 (introduce BUILD_SET) + Python 2.7a0 62211 (introduce MAP_ADD and SET_ADD) + Python 3000: 3000 + 3010 (removed UNARY_CONVERT) + 3020 (added BUILD_SET) + 3030 (added keyword-only parameters) + 3040 (added signature annotations) + 3050 (print becomes a function) + 3060 (PEP 3115 metaclass syntax) + 3061 (string literals become unicode) + 3071 (PEP 3109 raise changes) + 3081 (PEP 3137 make __file__ and __name__ unicode) + 3091 (kill str8 interning) + 3101 (merge from 2.6a0, see 62151) + 3103 (__file__ points to source file) + Python 3.0a4: 3111 (WITH_CLEANUP optimization). 
+ Python 3.0b1: 3131 (lexical exception stacking, including POP_EXCEPT + #3021) + Python 3.1a1: 3141 (optimize list, set and dict comprehensions: + change LIST_APPEND and SET_ADD, add MAP_ADD #2183) + Python 3.1a1: 3151 (optimize conditional branches: + introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE + #4715) + Python 3.2a1: 3160 (add SETUP_WITH #6101) + Python 3.2a2: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR #9225) + Python 3.2a3 3180 (add DELETE_DEREF #4617) + Python 3.3a1 3190 (__class__ super closure changed) + Python 3.3a1 3200 (PEP 3155 __qualname__ added #13448) + Python 3.3a1 3210 (added size modulo 2**32 to the pyc header #13645) + Python 3.3a2 3220 (changed PEP 380 implementation #14230) + Python 3.3a4 3230 (revert changes to implicit __class__ closure #14857) + Python 3.4a1 3250 (evaluate positional default arguments before + keyword-only defaults #16967) + Python 3.4a1 3260 (add LOAD_CLASSDEREF; allow locals of class to override + free vars #17853) + Python 3.4a1 3270 (various tweaks to the __class__ closure #12370) + Python 3.4a1 3280 (remove implicit class argument) + Python 3.4a4 3290 (changes to __qualname__ computation #19301) + Python 3.4a4 3300 (more changes to __qualname__ computation #19301) + Python 3.4rc2 3310 (alter __qualname__ computation #20625) + Python 3.5a1 3320 (PEP 465: Matrix multiplication operator #21176) + Python 3.5b1 3330 (PEP 448: Additional Unpacking Generalizations #2292) + Python 3.5b2 3340 (fix dictionary display evaluation order #11205) + Python 3.5b3 3350 (add GET_YIELD_FROM_ITER opcode #24400) + Python 3.5.2 3351 (fix BUILD_MAP_UNPACK_WITH_CALL opcode #27286) + Python 3.6a0 3360 (add FORMAT_VALUE opcode #25483) + Python 3.6a1 3361 (lineno delta of code.co_lnotab becomes signed #26107) + Python 3.6a2 3370 (16 bit wordcode #26647) + Python 3.6a2 3371 (add BUILD_CONST_KEY_MAP opcode #27140) + Python 3.6a2 3372 (MAKE_FUNCTION simplification, remove MAKE_CLOSURE + #27095) + Python 3.6b1 3373 (add BUILD_STRING opcode #27078) + Python 3.6b1 3375 (add SETUP_ANNOTATIONS and STORE_ANNOTATION opcodes + #27985) + Python 3.6b1 3376 (simplify CALL_FUNCTIONs & BUILD_MAP_UNPACK_WITH_CALL + #27213) + Python 3.6b1 3377 (set __class__ cell from type.__new__ #23722) + Python 3.6b2 3378 (add BUILD_TUPLE_UNPACK_WITH_CALL #28257) + Python 3.6rc1 3379 (more thorough __class__ validation #23722) + Python 3.7a1 3390 (add LOAD_METHOD and CALL_METHOD opcodes #26110) + Python 3.7a2 3391 (update GET_AITER #31709) + Python 3.7a4 3392 (PEP 552: Deterministic pycs #31650) + Python 3.7b1 3393 (remove STORE_ANNOTATION opcode #32550) + Python 3.7b5 3394 (restored docstring as the first stmt in the body; + this might affected the first line number #32911) + Python 3.8a1 3400 (move frame block handling to compiler #17611) + Python 3.8a1 3401 (add END_ASYNC_FOR #33041) + Python 3.8a1 3410 (PEP570 Python Positional-Only Parameters #36540) + Python 3.8b2 3411 (Reverse evaluation order of key: value in dict + comprehensions #35224) + Python 3.8b2 3412 (Swap the position of positional args and positional + only args in ast.arguments #37593) + Python 3.8b4 3413 (Fix "break" and "continue" in "finally" #37830) + Python 3.9a0 3420 (add LOAD_ASSERTION_ERROR #34880) + Python 3.9a0 3421 (simplified bytecode for with blocks #32949) + Python 3.9a0 3422 (remove BEGIN_FINALLY, END_FINALLY, CALL_FINALLY, POP_FINALLY bytecodes #33387) + Python 3.9a2 3423 (add IS_OP, CONTAINS_OP and JUMP_IF_NOT_EXC_MATCH bytecodes #39156) + Python 3.9a2 3424 (simplify bytecodes for *value unpacking) + 
Python 3.9a2 3425 (simplify bytecodes for **value unpacking) + Python 3.10a1 3430 (Make 'annotations' future by default) + Python 3.10a1 3431 (New line number table format -- PEP 626) + Python 3.10a2 3432 (Function annotation for MAKE_FUNCTION is changed from dict to tuple bpo-42202) + Python 3.10a2 3433 (RERAISE restores f_lasti if oparg != 0) + Python 3.10a6 3434 (PEP 634: Structural Pattern Matching) + Python 3.10a7 3435 Use instruction offsets (as opposed to byte offsets). + Python 3.10b1 3436 (Add GEN_START bytecode #43683) + Python 3.10b1 3437 (Undo making 'annotations' future by default - We like to dance among core devs!) + Python 3.10b1 3438 Safer line number table handling. + Python 3.10b1 3439 (Add ROT_N) + Python 3.11a1 3450 Use exception table for unwinding ("zero cost" exception handling) + Python 3.11a1 3451 (Add CALL_METHOD_KW) + Python 3.11a1 3452 (drop nlocals from marshaled code objects) + Python 3.11a1 3453 (add co_fastlocalnames and co_fastlocalkinds) + Python 3.11a1 3454 (compute cell offsets relative to locals bpo-43693) + Python 3.11a1 3455 (add MAKE_CELL bpo-43693) + Python 3.11a1 3456 (interleave cell args bpo-43693) + Python 3.11a1 3457 (Change localsplus to a bytes object bpo-43693) + Python 3.11a1 3458 (imported objects now don't use LOAD_METHOD/CALL_METHOD) + Python 3.11a1 3459 (PEP 657: add end line numbers and column offsets for instructions) + Python 3.11a1 3460 (Add co_qualname field to PyCodeObject bpo-44530) + Python 3.11a1 3461 (JUMP_ABSOLUTE must jump backwards) + Python 3.11a2 3462 (bpo-44511: remove COPY_DICT_WITHOUT_KEYS, change + MATCH_CLASS and MATCH_KEYS, and add COPY) + Python 3.11a3 3463 (bpo-45711: JUMP_IF_NOT_EXC_MATCH no longer pops the + active exception) + Python 3.11a3 3464 (bpo-45636: Merge numeric BINARY_*INPLACE_* into + BINARY_OP) + Python 3.11a3 3465 (Add COPY_FREE_VARS opcode) + Python 3.11a4 3466 (bpo-45292: PEP-654 except*) + Python 3.11a4 3467 (Change CALL_xxx opcodes) + Python 3.11a4 3468 (Add SEND opcode) + Python 3.11a4 3469 (bpo-45711: remove type, traceback from exc_info) + Python 3.11a4 3470 (bpo-46221: PREP_RERAISE_STAR no longer pushes lasti) + Python 3.11a4 3471 (bpo-46202: remove pop POP_EXCEPT_AND_RERAISE) + Python 3.11a4 3472 (bpo-46009: replace GEN_START with POP_TOP) + Python 3.11a4 3473 (Add POP_JUMP_IF_NOT_NONE/POP_JUMP_IF_NONE opcodes) + Python 3.11a4 3474 (Add RESUME opcode) + Python 3.11a5 3475 (Add RETURN_GENERATOR opcode) + Python 3.11a5 3476 (Add ASYNC_GEN_WRAP opcode) + Python 3.11a5 3477 (Replace DUP_TOP/DUP_TOP_TWO with COPY and + ROT_TWO/ROT_THREE/ROT_FOUR/ROT_N with SWAP) + Python 3.11a5 3478 (New CALL opcodes) + Python 3.11a5 3479 (Add PUSH_NULL opcode) + Python 3.11a5 3480 (New CALL opcodes, second iteration) + Python 3.11a5 3481 (Use inline cache for BINARY_OP) + Python 3.11a5 3482 (Use inline caching for UNPACK_SEQUENCE and LOAD_GLOBAL) + Python 3.11a5 3483 (Use inline caching for COMPARE_OP and BINARY_SUBSCR) + Python 3.11a5 3484 (Use inline caching for LOAD_ATTR, LOAD_METHOD, and + STORE_ATTR) + Python 3.11a5 3485 (Add an oparg to GET_AWAITABLE) + Python 3.11a6 3486 (Use inline caching for PRECALL and CALL) + Python 3.11a6 3487 (Remove the adaptive "oparg counter" mechanism) + Python 3.11a6 3488 (LOAD_GLOBAL can push additional NULL) + Python 3.11a6 3489 (Add JUMP_BACKWARD, remove JUMP_ABSOLUTE) + Python 3.11a6 3490 (remove JUMP_IF_NOT_EXC_MATCH, add CHECK_EXC_MATCH) + Python 3.11a6 3491 (remove JUMP_IF_NOT_EG_MATCH, add CHECK_EG_MATCH, + add JUMP_BACKWARD_NO_INTERRUPT, make JUMP_NO_INTERRUPT 
virtual) + Python 3.11a7 3492 (make POP_JUMP_IF_NONE/NOT_NONE/TRUE/FALSE relative) + Python 3.11a7 3493 (Make JUMP_IF_TRUE_OR_POP/JUMP_IF_FALSE_OR_POP relative) + Python 3.11a7 3494 (New location info table) + Python 3.11b4 3495 (Set line number of module's RESUME instr to 0 per PEP 626) + Python 3.12a1 3500 (Remove PRECALL opcode) + Python 3.12a1 3501 (YIELD_VALUE oparg == stack_depth) + Python 3.12a1 3502 (LOAD_FAST_CHECK, no NULL-check in LOAD_FAST) + Python 3.12a1 3503 (Shrink LOAD_METHOD cache) + Python 3.12a1 3504 (Merge LOAD_METHOD back into LOAD_ATTR) + Python 3.12a1 3505 (Specialization/Cache for FOR_ITER) + Python 3.12a1 3506 (Add BINARY_SLICE and STORE_SLICE instructions) + Python 3.12a1 3507 (Set lineno of module's RESUME to 0) + Python 3.12a1 3508 (Add CLEANUP_THROW) + Python 3.12a1 3509 (Conditional jumps only jump forward) + Python 3.12a2 3510 (FOR_ITER leaves iterator on the stack) + Python 3.12a2 3511 (Add STOPITERATION_ERROR instruction) + Python 3.12a2 3512 (Remove all unused consts from code objects) + Python 3.12a4 3513 (Add CALL_INTRINSIC_1 instruction, removed STOPITERATION_ERROR, PRINT_EXPR, IMPORT_STAR) + Python 3.12a4 3514 (Remove ASYNC_GEN_WRAP, LIST_TO_TUPLE, and UNARY_POSITIVE) + Python 3.12a5 3515 (Embed jump mask in COMPARE_OP oparg) + Python 3.12a5 3516 (Add COMPARE_AND_BRANCH instruction) + Python 3.12a5 3517 (Change YIELD_VALUE oparg to exception block depth) + Python 3.12a6 3518 (Add RETURN_CONST instruction) + Python 3.12a6 3519 (Modify SEND instruction) + Python 3.12a6 3520 (Remove PREP_RERAISE_STAR, add CALL_INTRINSIC_2) + Python 3.12a7 3521 (Shrink the LOAD_GLOBAL caches) + Python 3.12a7 3522 (Removed JUMP_IF_FALSE_OR_POP/JUMP_IF_TRUE_OR_POP) + Python 3.12a7 3523 (Convert COMPARE_AND_BRANCH back to COMPARE_OP) + Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches) + Python 3.12b1 3525 (Shrink the CALL caches) + Python 3.12b1 3526 (Add instrumentation support) + Python 3.12b1 3527 (Add LOAD_SUPER_ATTR) + Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization) + Python 3.12b1 3529 (Inline list/dict/set comprehensions) + Python 3.12b1 3530 (Shrink the LOAD_SUPER_ATTR caches) + Python 3.12b1 3531 (Add PEP 695 changes) + Python 3.13a1 3550 (Plugin optimizer support) + Python 3.13a1 3551 (Compact superinstructions) + Python 3.13a1 3552 (Remove LOAD_FAST__LOAD_CONST and LOAD_CONST__LOAD_FAST) + Python 3.13a1 3553 (Add SET_FUNCTION_ATTRIBUTE) + Python 3.13a1 3554 (more efficient bytecodes for f-strings) + Python 3.13a1 3555 (generate specialized opcodes metadata from bytecodes.c) + Python 3.13a1 3556 (Convert LOAD_CLOSURE to a pseudo-op) + Python 3.13a1 3557 (Make the conversion to boolean in jumps explicit) + Python 3.13a1 3558 (Reorder the stack items for CALL) + Python 3.13a1 3559 (Generate opcode IDs from bytecodes.c) + Python 3.13a1 3560 (Add RESUME_CHECK instruction) + Python 3.13a1 3561 (Add cache entry to branch instructions) + Python 3.13a1 3562 (Assign opcode IDs for internal ops in separate range) + Python 3.13a1 3563 (Add CALL_KW and remove KW_NAMES) + Python 3.13a1 3564 (Removed oparg from YIELD_VALUE, changed oparg values of RESUME) + Python 3.13a1 3565 (Oparg of YIELD_VALUE indicates whether it is in a yield-from) + Python 3.13a1 3566 (Emit JUMP_NO_INTERRUPT instead of JUMP for non-loop no-lineno cases) + Python 3.13a1 3567 (Reimplement line number propagation by the compiler) + Python 3.13a1 3568 (Change semantics of END_FOR) + Python 3.13a5 3569 (Specialize CONTAINS_OP) + Python 3.13a6 3570 (Add __firstlineno__ class attribute) + 
Python 3.14a1 3600 (Add LOAD_COMMON_CONSTANT) + Python 3.14a1 3601 (Fix miscompilation of private names in generic classes) + Python 3.14a1 3602 (Add LOAD_SPECIAL. Remove BEFORE_WITH and BEFORE_ASYNC_WITH) + Python 3.14a1 3603 (Remove BUILD_CONST_KEY_MAP) + + Python 3.15 will start with 3650 + + Please don't copy-paste the same pre-release tag for new entries above!!! + You should always use the *upcoming* tag. For example, if 3.12a6 came out + a week ago, I should put "Python 3.12a7" next to my new magic number. + +Whenever PYC_MAGIC_NUMBER is changed, the ranges in the magic_values array in +PC/launcher.c must also be updated. + +*/ + +#define PYC_MAGIC_NUMBER 3603 +/* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes + (little-endian) and then appending b'\r\n'. */ +#define PYC_MAGIC_NUMBER_TOKEN \ + ((uint32_t)PYC_MAGIC_NUMBER | ((uint32_t)'\r' << 16) | ((uint32_t)'\n' << 24)) + + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_MAGIC_NUMBER_H diff --git a/InternalDocs/compiler.md b/InternalDocs/compiler.md index b3dc0a48069969..52a3ab2f0a4abd 100644 --- a/InternalDocs/compiler.md +++ b/InternalDocs/compiler.md @@ -616,7 +616,7 @@ Important files * [Lib/opcode.py](https://github.com/python/cpython/blob/main/Lib/opcode.py) : opcode utilities exposed to Python. - * [Lib/importlib/_bootstrap_external.py](https://github.com/python/cpython/blob/main/Lib/importlib/_bootstrap_external.py) + * [Include/core/pycore_magic_number.h](https://github.com/python/cpython/blob/main/Include/internal/pycore_magic_number.h) : Home of the magic number (named ``MAGIC_NUMBER``) for bytecode versioning. diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 2bb44b290e4a84..4d154dc4c25edc 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -221,280 +221,7 @@ def _write_atomic(path, data, mode=0o666): _code_type = type(_write_atomic.__code__) - -# Finder/loader utility code ############################################### - -# Magic word to reject .pyc files generated by other Python versions. -# It should change for each incompatible change to the bytecode. -# -# The value of CR and LF is incorporated so if you ever read or write -# a .pyc file in text mode the magic number will be wrong; also, the -# Apple MPW compiler swaps their values, botching string constants. -# -# There were a variety of old schemes for setting the magic number. -# The current working scheme is to increment the previous value by -# 10. -# -# Starting with the adoption of PEP 3147 in Python 3.2, every bump in magic -# number also includes a new "magic tag", i.e. a human readable string used -# to represent the magic number in __pycache__ directories. When you change -# the magic number, you must also set a new unique magic tag. Generally this -# can be named after the Python major version of the magic number bump, but -# it can really be anything, as long as it's different than anything else -# that's come before. The tags are included in the following table, starting -# with Python 3.2a0. -# -# Known values: -# Python 1.5: 20121 -# Python 1.5.1: 20121 -# Python 1.5.2: 20121 -# Python 1.6: 50428 -# Python 2.0: 50823 -# Python 2.0.1: 50823 -# Python 2.1: 60202 -# Python 2.1.1: 60202 -# Python 2.1.2: 60202 -# Python 2.2: 60717 -# Python 2.3a0: 62011 -# Python 2.3a0: 62021 -# Python 2.3a0: 62011 (!) 
-# Python 2.4a0: 62041 -# Python 2.4a3: 62051 -# Python 2.4b1: 62061 -# Python 2.5a0: 62071 -# Python 2.5a0: 62081 (ast-branch) -# Python 2.5a0: 62091 (with) -# Python 2.5a0: 62092 (changed WITH_CLEANUP opcode) -# Python 2.5b3: 62101 (fix wrong code: for x, in ...) -# Python 2.5b3: 62111 (fix wrong code: x += yield) -# Python 2.5c1: 62121 (fix wrong lnotab with for loops and -# storing constants that should have been removed) -# Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp) -# Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode) -# Python 2.6a1: 62161 (WITH_CLEANUP optimization) -# Python 2.7a0: 62171 (optimize list comprehensions/change LIST_APPEND) -# Python 2.7a0: 62181 (optimize conditional branches: -# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE) -# Python 2.7a0 62191 (introduce SETUP_WITH) -# Python 2.7a0 62201 (introduce BUILD_SET) -# Python 2.7a0 62211 (introduce MAP_ADD and SET_ADD) -# Python 3000: 3000 -# 3010 (removed UNARY_CONVERT) -# 3020 (added BUILD_SET) -# 3030 (added keyword-only parameters) -# 3040 (added signature annotations) -# 3050 (print becomes a function) -# 3060 (PEP 3115 metaclass syntax) -# 3061 (string literals become unicode) -# 3071 (PEP 3109 raise changes) -# 3081 (PEP 3137 make __file__ and __name__ unicode) -# 3091 (kill str8 interning) -# 3101 (merge from 2.6a0, see 62151) -# 3103 (__file__ points to source file) -# Python 3.0a4: 3111 (WITH_CLEANUP optimization). -# Python 3.0b1: 3131 (lexical exception stacking, including POP_EXCEPT - #3021) -# Python 3.1a1: 3141 (optimize list, set and dict comprehensions: -# change LIST_APPEND and SET_ADD, add MAP_ADD #2183) -# Python 3.1a1: 3151 (optimize conditional branches: -# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE - #4715) -# Python 3.2a1: 3160 (add SETUP_WITH #6101) -# tag: cpython-32 -# Python 3.2a2: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR #9225) -# tag: cpython-32 -# Python 3.2a3 3180 (add DELETE_DEREF #4617) -# Python 3.3a1 3190 (__class__ super closure changed) -# Python 3.3a1 3200 (PEP 3155 __qualname__ added #13448) -# Python 3.3a1 3210 (added size modulo 2**32 to the pyc header #13645) -# Python 3.3a2 3220 (changed PEP 380 implementation #14230) -# Python 3.3a4 3230 (revert changes to implicit __class__ closure #14857) -# Python 3.4a1 3250 (evaluate positional default arguments before -# keyword-only defaults #16967) -# Python 3.4a1 3260 (add LOAD_CLASSDEREF; allow locals of class to override -# free vars #17853) -# Python 3.4a1 3270 (various tweaks to the __class__ closure #12370) -# Python 3.4a1 3280 (remove implicit class argument) -# Python 3.4a4 3290 (changes to __qualname__ computation #19301) -# Python 3.4a4 3300 (more changes to __qualname__ computation #19301) -# Python 3.4rc2 3310 (alter __qualname__ computation #20625) -# Python 3.5a1 3320 (PEP 465: Matrix multiplication operator #21176) -# Python 3.5b1 3330 (PEP 448: Additional Unpacking Generalizations #2292) -# Python 3.5b2 3340 (fix dictionary display evaluation order #11205) -# Python 3.5b3 3350 (add GET_YIELD_FROM_ITER opcode #24400) -# Python 3.5.2 3351 (fix BUILD_MAP_UNPACK_WITH_CALL opcode #27286) -# Python 3.6a0 3360 (add FORMAT_VALUE opcode #25483) -# Python 3.6a1 3361 (lineno delta of code.co_lnotab becomes signed #26107) -# Python 3.6a2 3370 (16 bit wordcode #26647) -# Python 3.6a2 3371 (add BUILD_CONST_KEY_MAP opcode #27140) -# Python 3.6a2 3372 (MAKE_FUNCTION simplification, remove MAKE_CLOSURE -# #27095) -# Python 3.6b1 3373 (add BUILD_STRING opcode #27078) -# 
Python 3.6b1 3375 (add SETUP_ANNOTATIONS and STORE_ANNOTATION opcodes -# #27985) -# Python 3.6b1 3376 (simplify CALL_FUNCTIONs & BUILD_MAP_UNPACK_WITH_CALL - #27213) -# Python 3.6b1 3377 (set __class__ cell from type.__new__ #23722) -# Python 3.6b2 3378 (add BUILD_TUPLE_UNPACK_WITH_CALL #28257) -# Python 3.6rc1 3379 (more thorough __class__ validation #23722) -# Python 3.7a1 3390 (add LOAD_METHOD and CALL_METHOD opcodes #26110) -# Python 3.7a2 3391 (update GET_AITER #31709) -# Python 3.7a4 3392 (PEP 552: Deterministic pycs #31650) -# Python 3.7b1 3393 (remove STORE_ANNOTATION opcode #32550) -# Python 3.7b5 3394 (restored docstring as the first stmt in the body; -# this might affected the first line number #32911) -# Python 3.8a1 3400 (move frame block handling to compiler #17611) -# Python 3.8a1 3401 (add END_ASYNC_FOR #33041) -# Python 3.8a1 3410 (PEP570 Python Positional-Only Parameters #36540) -# Python 3.8b2 3411 (Reverse evaluation order of key: value in dict -# comprehensions #35224) -# Python 3.8b2 3412 (Swap the position of positional args and positional -# only args in ast.arguments #37593) -# Python 3.8b4 3413 (Fix "break" and "continue" in "finally" #37830) -# Python 3.9a0 3420 (add LOAD_ASSERTION_ERROR #34880) -# Python 3.9a0 3421 (simplified bytecode for with blocks #32949) -# Python 3.9a0 3422 (remove BEGIN_FINALLY, END_FINALLY, CALL_FINALLY, POP_FINALLY bytecodes #33387) -# Python 3.9a2 3423 (add IS_OP, CONTAINS_OP and JUMP_IF_NOT_EXC_MATCH bytecodes #39156) -# Python 3.9a2 3424 (simplify bytecodes for *value unpacking) -# Python 3.9a2 3425 (simplify bytecodes for **value unpacking) -# Python 3.10a1 3430 (Make 'annotations' future by default) -# Python 3.10a1 3431 (New line number table format -- PEP 626) -# Python 3.10a2 3432 (Function annotation for MAKE_FUNCTION is changed from dict to tuple bpo-42202) -# Python 3.10a2 3433 (RERAISE restores f_lasti if oparg != 0) -# Python 3.10a6 3434 (PEP 634: Structural Pattern Matching) -# Python 3.10a7 3435 Use instruction offsets (as opposed to byte offsets). -# Python 3.10b1 3436 (Add GEN_START bytecode #43683) -# Python 3.10b1 3437 (Undo making 'annotations' future by default - We like to dance among core devs!) -# Python 3.10b1 3438 Safer line number table handling. 
-# Python 3.10b1 3439 (Add ROT_N) -# Python 3.11a1 3450 Use exception table for unwinding ("zero cost" exception handling) -# Python 3.11a1 3451 (Add CALL_METHOD_KW) -# Python 3.11a1 3452 (drop nlocals from marshaled code objects) -# Python 3.11a1 3453 (add co_fastlocalnames and co_fastlocalkinds) -# Python 3.11a1 3454 (compute cell offsets relative to locals bpo-43693) -# Python 3.11a1 3455 (add MAKE_CELL bpo-43693) -# Python 3.11a1 3456 (interleave cell args bpo-43693) -# Python 3.11a1 3457 (Change localsplus to a bytes object bpo-43693) -# Python 3.11a1 3458 (imported objects now don't use LOAD_METHOD/CALL_METHOD) -# Python 3.11a1 3459 (PEP 657: add end line numbers and column offsets for instructions) -# Python 3.11a1 3460 (Add co_qualname field to PyCodeObject bpo-44530) -# Python 3.11a1 3461 (JUMP_ABSOLUTE must jump backwards) -# Python 3.11a2 3462 (bpo-44511: remove COPY_DICT_WITHOUT_KEYS, change -# MATCH_CLASS and MATCH_KEYS, and add COPY) -# Python 3.11a3 3463 (bpo-45711: JUMP_IF_NOT_EXC_MATCH no longer pops the -# active exception) -# Python 3.11a3 3464 (bpo-45636: Merge numeric BINARY_*/INPLACE_* into -# BINARY_OP) -# Python 3.11a3 3465 (Add COPY_FREE_VARS opcode) -# Python 3.11a4 3466 (bpo-45292: PEP-654 except*) -# Python 3.11a4 3467 (Change CALL_xxx opcodes) -# Python 3.11a4 3468 (Add SEND opcode) -# Python 3.11a4 3469 (bpo-45711: remove type, traceback from exc_info) -# Python 3.11a4 3470 (bpo-46221: PREP_RERAISE_STAR no longer pushes lasti) -# Python 3.11a4 3471 (bpo-46202: remove pop POP_EXCEPT_AND_RERAISE) -# Python 3.11a4 3472 (bpo-46009: replace GEN_START with POP_TOP) -# Python 3.11a4 3473 (Add POP_JUMP_IF_NOT_NONE/POP_JUMP_IF_NONE opcodes) -# Python 3.11a4 3474 (Add RESUME opcode) -# Python 3.11a5 3475 (Add RETURN_GENERATOR opcode) -# Python 3.11a5 3476 (Add ASYNC_GEN_WRAP opcode) -# Python 3.11a5 3477 (Replace DUP_TOP/DUP_TOP_TWO with COPY and -# ROT_TWO/ROT_THREE/ROT_FOUR/ROT_N with SWAP) -# Python 3.11a5 3478 (New CALL opcodes) -# Python 3.11a5 3479 (Add PUSH_NULL opcode) -# Python 3.11a5 3480 (New CALL opcodes, second iteration) -# Python 3.11a5 3481 (Use inline cache for BINARY_OP) -# Python 3.11a5 3482 (Use inline caching for UNPACK_SEQUENCE and LOAD_GLOBAL) -# Python 3.11a5 3483 (Use inline caching for COMPARE_OP and BINARY_SUBSCR) -# Python 3.11a5 3484 (Use inline caching for LOAD_ATTR, LOAD_METHOD, and -# STORE_ATTR) -# Python 3.11a5 3485 (Add an oparg to GET_AWAITABLE) -# Python 3.11a6 3486 (Use inline caching for PRECALL and CALL) -# Python 3.11a6 3487 (Remove the adaptive "oparg counter" mechanism) -# Python 3.11a6 3488 (LOAD_GLOBAL can push additional NULL) -# Python 3.11a6 3489 (Add JUMP_BACKWARD, remove JUMP_ABSOLUTE) -# Python 3.11a6 3490 (remove JUMP_IF_NOT_EXC_MATCH, add CHECK_EXC_MATCH) -# Python 3.11a6 3491 (remove JUMP_IF_NOT_EG_MATCH, add CHECK_EG_MATCH, -# add JUMP_BACKWARD_NO_INTERRUPT, make JUMP_NO_INTERRUPT virtual) -# Python 3.11a7 3492 (make POP_JUMP_IF_NONE/NOT_NONE/TRUE/FALSE relative) -# Python 3.11a7 3493 (Make JUMP_IF_TRUE_OR_POP/JUMP_IF_FALSE_OR_POP relative) -# Python 3.11a7 3494 (New location info table) -# Python 3.11b4 3495 (Set line number of module's RESUME instr to 0 per PEP 626) -# Python 3.12a1 3500 (Remove PRECALL opcode) -# Python 3.12a1 3501 (YIELD_VALUE oparg == stack_depth) -# Python 3.12a1 3502 (LOAD_FAST_CHECK, no NULL-check in LOAD_FAST) -# Python 3.12a1 3503 (Shrink LOAD_METHOD cache) -# Python 3.12a1 3504 (Merge LOAD_METHOD back into LOAD_ATTR) -# Python 3.12a1 3505 (Specialization/Cache for FOR_ITER) -# Python 
3.12a1 3506 (Add BINARY_SLICE and STORE_SLICE instructions) -# Python 3.12a1 3507 (Set lineno of module's RESUME to 0) -# Python 3.12a1 3508 (Add CLEANUP_THROW) -# Python 3.12a1 3509 (Conditional jumps only jump forward) -# Python 3.12a2 3510 (FOR_ITER leaves iterator on the stack) -# Python 3.12a2 3511 (Add STOPITERATION_ERROR instruction) -# Python 3.12a2 3512 (Remove all unused consts from code objects) -# Python 3.12a4 3513 (Add CALL_INTRINSIC_1 instruction, removed STOPITERATION_ERROR, PRINT_EXPR, IMPORT_STAR) -# Python 3.12a4 3514 (Remove ASYNC_GEN_WRAP, LIST_TO_TUPLE, and UNARY_POSITIVE) -# Python 3.12a5 3515 (Embed jump mask in COMPARE_OP oparg) -# Python 3.12a5 3516 (Add COMPARE_AND_BRANCH instruction) -# Python 3.12a5 3517 (Change YIELD_VALUE oparg to exception block depth) -# Python 3.12a6 3518 (Add RETURN_CONST instruction) -# Python 3.12a6 3519 (Modify SEND instruction) -# Python 3.12a6 3520 (Remove PREP_RERAISE_STAR, add CALL_INTRINSIC_2) -# Python 3.12a7 3521 (Shrink the LOAD_GLOBAL caches) -# Python 3.12a7 3522 (Removed JUMP_IF_FALSE_OR_POP/JUMP_IF_TRUE_OR_POP) -# Python 3.12a7 3523 (Convert COMPARE_AND_BRANCH back to COMPARE_OP) -# Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches) -# Python 3.12b1 3525 (Shrink the CALL caches) -# Python 3.12b1 3526 (Add instrumentation support) -# Python 3.12b1 3527 (Add LOAD_SUPER_ATTR) -# Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization) -# Python 3.12b1 3529 (Inline list/dict/set comprehensions) -# Python 3.12b1 3530 (Shrink the LOAD_SUPER_ATTR caches) -# Python 3.12b1 3531 (Add PEP 695 changes) -# Python 3.13a1 3550 (Plugin optimizer support) -# Python 3.13a1 3551 (Compact superinstructions) -# Python 3.13a1 3552 (Remove LOAD_FAST__LOAD_CONST and LOAD_CONST__LOAD_FAST) -# Python 3.13a1 3553 (Add SET_FUNCTION_ATTRIBUTE) -# Python 3.13a1 3554 (more efficient bytecodes for f-strings) -# Python 3.13a1 3555 (generate specialized opcodes metadata from bytecodes.c) -# Python 3.13a1 3556 (Convert LOAD_CLOSURE to a pseudo-op) -# Python 3.13a1 3557 (Make the conversion to boolean in jumps explicit) -# Python 3.13a1 3558 (Reorder the stack items for CALL) -# Python 3.13a1 3559 (Generate opcode IDs from bytecodes.c) -# Python 3.13a1 3560 (Add RESUME_CHECK instruction) -# Python 3.13a1 3561 (Add cache entry to branch instructions) -# Python 3.13a1 3562 (Assign opcode IDs for internal ops in separate range) -# Python 3.13a1 3563 (Add CALL_KW and remove KW_NAMES) -# Python 3.13a1 3564 (Removed oparg from YIELD_VALUE, changed oparg values of RESUME) -# Python 3.13a1 3565 (Oparg of YIELD_VALUE indicates whether it is in a yield-from) -# Python 3.13a1 3566 (Emit JUMP_NO_INTERRUPT instead of JUMP for non-loop no-lineno cases) -# Python 3.13a1 3567 (Reimplement line number propagation by the compiler) -# Python 3.13a1 3568 (Change semantics of END_FOR) -# Python 3.13a5 3569 (Specialize CONTAINS_OP) -# Python 3.13a6 3570 (Add __firstlineno__ class attribute) -# Python 3.14a1 3600 (Add LOAD_COMMON_CONSTANT) -# Python 3.14a1 3601 (Fix miscompilation of private names in generic classes) -# Python 3.14a1 3602 (Add LOAD_SPECIAL. Remove BEFORE_WITH and BEFORE_ASYNC_WITH) -# Python 3.14a1 3603 (Remove BUILD_CONST_KEY_MAP) - -# Python 3.15 will start with 3650 - -# Please don't copy-paste the same pre-release tag for new entries above!!! -# You should always use the *upcoming* tag. For example, if 3.12a6 came out -# a week ago, I should put "Python 3.12a7" next to my new magic number. 
- -# MAGIC must change whenever the bytecode emitted by the compiler may no -# longer be understood by older implementations of the eval loop (usually -# due to the addition of new opcodes). -# -# Starting with Python 3.11, Python 3.n starts with magic number 2900+50n. -# -# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array -# in PC/launcher.c must also be updated. - -MAGIC_NUMBER = (3603).to_bytes(2, 'little') + b'\r\n' - -_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c +MAGIC_NUMBER = (_imp.pyc_magic_number).to_bytes(2, 'little') + b'\r\n' _PYCACHE = '__pycache__' _OPT = 'opt-' @@ -1133,7 +860,7 @@ def get_code(self, fullname): _imp.check_hash_based_pycs == 'always')): source_bytes = self.get_data(source_path) source_hash = _imp.source_hash( - _RAW_MAGIC_NUMBER, + _imp.pyc_magic_number_token, source_bytes, ) _validate_hash_pyc(data, source_hash, fullname, @@ -1162,7 +889,7 @@ def get_code(self, fullname): source_mtime is not None): if hash_based: if source_hash is None: - source_hash = _imp.source_hash(_RAW_MAGIC_NUMBER, + source_hash = _imp.source_hash(_imp.pyc_magic_number_token, source_bytes) data = _code_to_hash_pyc(code_object, source_hash, check_source) else: diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py index 8403ef9b44ad1a..2b564e9b52e0cb 100644 --- a/Lib/importlib/util.py +++ b/Lib/importlib/util.py @@ -5,7 +5,6 @@ from ._bootstrap import spec_from_loader from ._bootstrap import _find_spec from ._bootstrap_external import MAGIC_NUMBER -from ._bootstrap_external import _RAW_MAGIC_NUMBER from ._bootstrap_external import cache_from_source from ._bootstrap_external import decode_source from ._bootstrap_external import source_from_cache @@ -18,7 +17,7 @@ def source_hash(source_bytes): "Return the hash of *source_bytes* as used in hash-based pyc files." - return _imp.source_hash(_RAW_MAGIC_NUMBER, source_bytes) + return _imp.source_hash(_imp.pyc_magic_number_token, source_bytes) def resolve_name(name, package): diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index e29097baaf53ae..56c6ffe93fce37 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -3113,6 +3113,15 @@ def test_pyimport_addmodule_create(self): self.assertIs(mod, sys.modules[name]) +@cpython_only +class TestMagicNumber(unittest.TestCase): + def test_magic_number_endianness(self): + magic_number = (_imp.pyc_magic_number).to_bytes(2, 'little') + b'\r\n' + raw_magic_number = int.from_bytes(magic_number, 'little') + + self.assertEqual(raw_magic_number, _imp.pyc_magic_number_token) + + if __name__ == '__main__': # Test needs to be a package, so we can do relative imports. 
unittest.main() diff --git a/Lib/zipimport.py b/Lib/zipimport.py index 68f031f89c9996..f2724dd0268358 100644 --- a/Lib/zipimport.py +++ b/Lib/zipimport.py @@ -705,7 +705,7 @@ def _unmarshal_code(self, pathname, fullpath, fullname, data): source_bytes = _get_pyc_source(self, fullpath) if source_bytes is not None: source_hash = _imp.source_hash( - _bootstrap_external._RAW_MAGIC_NUMBER, + _imp.pyc_magic_number_token, source_bytes, ) diff --git a/Python/import.c b/Python/import.c index 40b7feac001d6e..540874a0f0414f 100644 --- a/Python/import.c +++ b/Python/import.c @@ -6,6 +6,7 @@ #include "pycore_import.h" // _PyImport_BootstrapImp() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_interp.h" // struct _import_runtime_state +#include "pycore_magic_number.h" // PYC_MAGIC_NUMBER #include "pycore_namespace.h" // _PyNamespace_Type #include "pycore_object.h" // _Py_SetImmortal() #include "pycore_pyerrors.h" // _PyErr_SetString() @@ -2475,23 +2476,9 @@ _PyImport_GetBuiltinModuleNames(void) long PyImport_GetMagicNumber(void) { - long res; - PyInterpreterState *interp = _PyInterpreterState_GET(); - PyObject *external, *pyc_magic; - - external = PyObject_GetAttrString(IMPORTLIB(interp), "_bootstrap_external"); - if (external == NULL) - return -1; - pyc_magic = PyObject_GetAttrString(external, "_RAW_MAGIC_NUMBER"); - Py_DECREF(external); - if (pyc_magic == NULL) - return -1; - res = PyLong_AsLong(pyc_magic); - Py_DECREF(pyc_magic); - return res; + return PYC_MAGIC_NUMBER_TOKEN; } - extern const char * _PySys_ImplCacheTag; const char * @@ -4823,6 +4810,16 @@ imp_module_exec(PyObject *module) return -1; } + if (PyModule_AddIntConstant(module, "pyc_magic_number", PYC_MAGIC_NUMBER) < 0) { + return -1; + } + + if (PyModule_AddIntConstant( + module, "pyc_magic_number_token", PYC_MAGIC_NUMBER_TOKEN) < 0) + { + return -1; + } + return 0; } From c68cb8e0c9bd75ded25578c2fba6469e55a06e93 Mon Sep 17 00:00:00 2001 From: Thomas Grainger Date: Tue, 30 Jul 2024 20:42:25 +0100 Subject: [PATCH 28/70] Remove outdated note about instance methods from datamodel.rst (#122471) --- Doc/reference/datamodel.rst | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 144c6f78ccd443..2576f9a07284eb 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -730,14 +730,7 @@ When an instance method object is derived from a :class:`classmethod` object, th itself, so that calling either ``x.f(1)`` or ``C.f(1)`` is equivalent to calling ``f(C,1)`` where ``f`` is the underlying function. -Note that the transformation from :ref:`function object ` -to instance method -object happens each time the attribute is retrieved from the instance. In -some cases, a fruitful optimization is to assign the attribute to a local -variable and call that local variable. Also notice that this -transformation only happens for user-defined functions; other callable -objects (and all non-callable objects) are retrieved without -transformation. It is also important to note that user-defined functions +It is important to note that user-defined functions which are attributes of a class instance are not converted to bound methods; this *only* happens when the function is an attribute of the class. 
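As an aside on the paragraph kept above, here is a minimal, hypothetical sketch (not part of the patch; the names f, C and c are illustrative only) of the behaviour it describes: a function becomes a bound method only when it is looked up as an attribute of the class, while the same function stored on an instance is returned without transformation.

    def f(*args):
        return args

    class C:
        pass

    c = C()
    C.f = f            # attribute of the class -> bound method on lookup
    c.g = f            # attribute of the instance -> plain function on lookup

    print(c.f(1))      # (<__main__.C object at 0x...>, 1) -- c passed implicitly
    print(c.g(1))      # (1,) -- no binding, no implicit first argument
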
From 1cac0908fb6866c30b7fe106bc8d6cd03c7977f9 Mon Sep 17 00:00:00 2001 From: Nate Ohlson Date: Tue, 30 Jul 2024 14:49:15 -0500 Subject: [PATCH 29/70] gh-112301: Add argument aliases and tee compiler output for check warnings (GH-122465) Also remove superfluous shebang from the warning check script --- .github/workflows/reusable-ubuntu.yml | 2 +- Tools/build/check_warnings.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml index c6289a74e9a5f6..8dd5f559585368 100644 --- a/.github/workflows/reusable-ubuntu.yml +++ b/.github/workflows/reusable-ubuntu.yml @@ -75,7 +75,7 @@ jobs: ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} - name: Build CPython out-of-tree working-directory: ${{ env.CPYTHON_BUILDDIR }} - run: make -j4 &> compiler_output.txt + run: set -o pipefail; make -j4 2>&1 | tee compiler_output.txt - name: Display build info working-directory: ${{ env.CPYTHON_BUILDDIR }} run: make pythoninfo diff --git a/Tools/build/check_warnings.py b/Tools/build/check_warnings.py index f0c0067f4ab255..af9f7f169ad943 100644 --- a/Tools/build/check_warnings.py +++ b/Tools/build/check_warnings.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 """ Parses compiler output with -fdiagnostics-format=json and checks that warnings exist only in files that are expected to have warnings. @@ -114,24 +113,28 @@ def get_unexpected_improvements( def main(argv: list[str] | None = None) -> int: parser = argparse.ArgumentParser() parser.add_argument( + "-c", "--compiler-output-file-path", type=str, required=True, help="Path to the compiler output file", ) parser.add_argument( + "-i", "--warning-ignore-file-path", type=str, required=True, help="Path to the warning ignore file", ) parser.add_argument( + "-x", "--fail-on-regression", action="store_true", default=False, help="Flag to fail if new warnings are found", ) parser.add_argument( + "-X", "--fail-on-improvement", action="store_true", default=False, From 5912487938ac4b517209082ab9e6d2d3d0fb4f4d Mon Sep 17 00:00:00 2001 From: Petr Viktorin Date: Wed, 31 Jul 2024 00:11:00 +0200 Subject: [PATCH 30/70] gh-120906: Support arbitrary hashable keys in FrameLocalsProxy (GH-122309) Co-authored-by: Alyssa Coghlan --- Lib/test/test_frame.py | 127 ++++++++++++++++ ...-07-26-13-56-32.gh-issue-120906.qBh2I9.rst | 1 + Objects/frameobject.c | 140 ++++++++++-------- 3 files changed, 208 insertions(+), 60 deletions(-) create mode 100644 Misc/NEWS.d/next/Core_and_Builtins/2024-07-26-13-56-32.gh-issue-120906.qBh2I9.rst diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index b7ef6cefaabbc0..ca88e657367d9a 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -15,6 +15,7 @@ from test import support from test.support import import_helper, threading_helper from test.support.script_helper import assert_python_ok +from test import mapping_tests class ClearTest(unittest.TestCase): @@ -431,6 +432,132 @@ def test_is_mapping(self): kind = "other" self.assertEqual(kind, "mapping") + def _x_stringlikes(self): + class StringSubclass(str): + pass + + class ImpostorX: + def __hash__(self): + return hash('x') + + def __eq__(self, other): + return other == 'x' + + return StringSubclass('x'), ImpostorX(), 'x' + + def test_proxy_key_stringlikes_overwrite(self): + def f(obj): + x = 1 + proxy = sys._getframe().f_locals + proxy[obj] = 2 + return ( + list(proxy.keys()), + dict(proxy), + proxy + ) + + for obj in self._x_stringlikes(): + with 
self.subTest(cls=type(obj).__name__): + + keys_snapshot, proxy_snapshot, proxy = f(obj) + expected_keys = ['obj', 'x', 'proxy'] + expected_dict = {'obj': 'x', 'x': 2, 'proxy': proxy} + self.assertEqual(proxy.keys(), expected_keys) + self.assertEqual(proxy, expected_dict) + self.assertEqual(keys_snapshot, expected_keys) + self.assertEqual(proxy_snapshot, expected_dict) + + def test_proxy_key_stringlikes_ftrst_write(self): + def f(obj): + proxy = sys._getframe().f_locals + proxy[obj] = 2 + self.assertEqual(x, 2) + x = 1 + + for obj in self._x_stringlikes(): + with self.subTest(cls=type(obj).__name__): + f(obj) + + def test_proxy_key_unhashables(self): + class StringSubclass(str): + __hash__ = None + + class ObjectSubclass: + __hash__ = None + + proxy = sys._getframe().f_locals + + for obj in StringSubclass('x'), ObjectSubclass(): + with self.subTest(cls=type(obj).__name__): + with self.assertRaises(TypeError): + proxy[obj] + with self.assertRaises(TypeError): + proxy[obj] = 0 + + +class FrameLocalsProxyMappingTests(mapping_tests.TestHashMappingProtocol): + """Test that FrameLocalsProxy behaves like a Mapping (with exceptions)""" + + def _f(*args, **kwargs): + def _f(): + return sys._getframe().f_locals + return _f() + type2test = _f + + @unittest.skipIf(True, 'Locals proxies for different frames never compare as equal') + def test_constructor(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: del proxy[key] fails') + def test_write(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.popitem') + def test_popitem(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.pop') + def test_pop(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.clear') + def test_clear(self): + pass + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.fromkeys') + def test_fromkeys(self): + pass + + # no del + def test_getitem(self): + mapping_tests.BasicTestMappingProtocol.test_getitem(self) + d = self._full_mapping({'a': 1, 'b': 2}) + self.assertEqual(d['a'], 1) + self.assertEqual(d['b'], 2) + d['c'] = 3 + d['a'] = 4 + self.assertEqual(d['c'], 3) + self.assertEqual(d['a'], 4) + + @unittest.skipIf(True, 'Unlike a mapping: no proxy.update') + def test_update(self): + pass + + # proxy.copy returns a regular dict + def test_copy(self): + d = self._full_mapping({1:1, 2:2, 3:3}) + self.assertEqual(d.copy(), {1:1, 2:2, 3:3}) + d = self._empty_mapping() + self.assertEqual(d.copy(), d) + self.assertRaises(TypeError, d.copy, None) + + self.assertIsInstance(d.copy(), dict) + + @unittest.skipIf(True, 'Locals proxies for different frames never compare as equal') + def test_eq(self): + pass + + class TestFrameCApi(unittest.TestCase): def test_basic(self): x = 1 diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-07-26-13-56-32.gh-issue-120906.qBh2I9.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-26-13-56-32.gh-issue-120906.qBh2I9.rst new file mode 100644 index 00000000000000..2b753bc37d4a39 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-26-13-56-32.gh-issue-120906.qBh2I9.rst @@ -0,0 +1 @@ +:attr:`frame.f_locals` now supports arbitrary hashable objects as keys. diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 88093eb9071ae4..a8be7d75371c16 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -53,22 +53,27 @@ static int framelocalsproxy_getkeyindex(PyFrameObject *frame, PyObject* key, bool read) { /* - * Returns the fast locals index of the key + * Returns -2 (!) if an error occurred; exception will be set. 
+ * Returns the fast locals index of the key on success: * - if read == true, returns the index if the value is not NULL * - if read == false, returns the index if the value is not hidden + * Otherwise returns -1. */ - assert(PyUnicode_CheckExact(key)); - PyCodeObject *co = _PyFrame_GetCode(frame->f_frame); - int found_key = false; + + // Ensure that the key is hashable. + Py_hash_t key_hash = PyObject_Hash(key); + if (key_hash == -1) { + return -2; + } + bool found = false; // We do 2 loops here because it's highly possible the key is interned // and we can do a pointer comparison. for (int i = 0; i < co->co_nlocalsplus; i++) { PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); if (name == key) { - found_key = true; if (read) { if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) { return i; @@ -78,23 +83,35 @@ framelocalsproxy_getkeyindex(PyFrameObject *frame, PyObject* key, bool read) return i; } } + found = true; } } - - if (!found_key) { - // This is unlikely, but we need to make sure. This means the key - // is not interned. - for (int i = 0; i < co->co_nlocalsplus; i++) { - PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); - if (_PyUnicode_EQ(name, key)) { - if (read) { - if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) { - return i; - } - } else { - if (!(_PyLocals_GetKind(co->co_localspluskinds, i) & CO_FAST_HIDDEN)) { - return i; - } + if (found) { + // This is an attempt to read an unset local variable or + // write to a variable that is hidden from regular write operations + return -1; + } + // This is unlikely, but we need to make sure. This means the key + // is not interned. + for (int i = 0; i < co->co_nlocalsplus; i++) { + PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); + Py_hash_t name_hash = PyObject_Hash(name); + assert(name_hash != -1); // keys are exact unicode + if (name_hash != key_hash) { + continue; + } + int same = PyObject_RichCompareBool(name, key, Py_EQ); + if (same < 0) { + return -2; + } + if (same) { + if (read) { + if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) { + return i; + } + } else { + if (!(_PyLocals_GetKind(co->co_localspluskinds, i) & CO_FAST_HIDDEN)) { + return i; } } } @@ -109,13 +126,14 @@ framelocalsproxy_getitem(PyObject *self, PyObject *key) PyFrameObject* frame = ((PyFrameLocalsProxyObject*)self)->frame; PyCodeObject* co = _PyFrame_GetCode(frame->f_frame); - if (PyUnicode_CheckExact(key)) { - int i = framelocalsproxy_getkeyindex(frame, key, true); - if (i >= 0) { - PyObject *value = framelocalsproxy_getval(frame->f_frame, co, i); - assert(value != NULL); - return Py_NewRef(value); - } + int i = framelocalsproxy_getkeyindex(frame, key, true); + if (i == -2) { + return NULL; + } + if (i >= 0) { + PyObject *value = framelocalsproxy_getval(frame->f_frame, co, i); + assert(value != NULL); + return Py_NewRef(value); } // Okay not in the fast locals, try extra locals @@ -145,37 +163,38 @@ framelocalsproxy_setitem(PyObject *self, PyObject *key, PyObject *value) return -1; } - if (PyUnicode_CheckExact(key)) { - int i = framelocalsproxy_getkeyindex(frame, key, false); - if (i >= 0) { - _Py_Executors_InvalidateDependency(PyInterpreterState_Get(), co, 1); - - _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); - _PyStackRef oldvalue = fast[i]; - PyObject *cell = NULL; - if (kind == CO_FAST_FREE) { - // The cell was set when the frame was created from - // the function's closure. 
- assert(oldvalue.bits != 0 && PyCell_Check(PyStackRef_AsPyObjectBorrow(oldvalue))); - cell = PyStackRef_AsPyObjectBorrow(oldvalue); - } else if (kind & CO_FAST_CELL && oldvalue.bits != 0) { - PyObject *as_obj = PyStackRef_AsPyObjectBorrow(oldvalue); - if (PyCell_Check(as_obj)) { - cell = as_obj; - } + int i = framelocalsproxy_getkeyindex(frame, key, false); + if (i == -2) { + return -1; + } + if (i >= 0) { + _Py_Executors_InvalidateDependency(PyInterpreterState_Get(), co, 1); + + _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); + _PyStackRef oldvalue = fast[i]; + PyObject *cell = NULL; + if (kind == CO_FAST_FREE) { + // The cell was set when the frame was created from + // the function's closure. + assert(oldvalue.bits != 0 && PyCell_Check(PyStackRef_AsPyObjectBorrow(oldvalue))); + cell = PyStackRef_AsPyObjectBorrow(oldvalue); + } else if (kind & CO_FAST_CELL && oldvalue.bits != 0) { + PyObject *as_obj = PyStackRef_AsPyObjectBorrow(oldvalue); + if (PyCell_Check(as_obj)) { + cell = as_obj; } - if (cell != NULL) { - PyObject *oldvalue_o = PyCell_GET(cell); - if (value != oldvalue_o) { - PyCell_SET(cell, Py_XNewRef(value)); - Py_XDECREF(oldvalue_o); - } - } else if (value != PyStackRef_AsPyObjectBorrow(oldvalue)) { - PyStackRef_XCLOSE(fast[i]); - fast[i] = PyStackRef_FromPyObjectNew(value); + } + if (cell != NULL) { + PyObject *oldvalue_o = PyCell_GET(cell); + if (value != oldvalue_o) { + PyCell_SET(cell, Py_XNewRef(value)); + Py_XDECREF(oldvalue_o); } - return 0; + } else if (value != PyStackRef_AsPyObjectBorrow(oldvalue)) { + PyStackRef_XCLOSE(fast[i]); + fast[i] = PyStackRef_FromPyObjectNew(value); } + return 0; } // Okay not in the fast locals, try extra locals @@ -545,11 +564,12 @@ framelocalsproxy_contains(PyObject *self, PyObject *key) { PyFrameObject *frame = ((PyFrameLocalsProxyObject*)self)->frame; - if (PyUnicode_CheckExact(key)) { - int i = framelocalsproxy_getkeyindex(frame, key, true); - if (i >= 0) { - return 1; - } + int i = framelocalsproxy_getkeyindex(frame, key, true); + if (i == -2) { + return -1; + } + if (i >= 0) { + return 1; } PyObject *extra = ((PyFrameObject*)frame)->f_extra_locals; From 097633981879b3c9de9a1dd120d3aa585ecc2384 Mon Sep 17 00:00:00 2001 From: Petr Viktorin Date: Wed, 31 Jul 2024 00:19:48 +0200 Subject: [PATCH 31/70] gh-121650: Encode newlines in headers, and verify headers are sound (GH-122233) ## Encode header parts that contain newlines Per RFC 2047: > [...] these encoding schemes allow the > encoding of arbitrary octet values, mail readers that implement this > decoding should also ensure that display of the decoded data on the > recipient's terminal will not cause unwanted side-effects It seems that the "quoted-word" scheme is a valid way to include a newline character in a header value, just like we already allow undecodable bytes or control characters. They do need to be properly quoted when serialized to text, though. ## Verify that email headers are well-formed This should fail for custom fold() implementations that aren't careful about newlines. 
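As a rough standalone sketch of that check (condensed from the new generator and policy tests added below, and assuming this change is applied, since HeaderWriteError and the verification step are introduced here), a custom header whose fold() returns an unsafely folded value is refused at serialization time:

    from email import errors, message_from_string, policy

    class LiteralHeader(str):
        name = 'Header'
        def fold(self, **kwargs):
            return self          # returns the raw value, bypassing safe folding

    msg = message_from_string("Header: Value\r\n\r\nBody", policy=policy.default)
    del msg['Header']
    msg['Header'] = LiteralHeader('Value\r\nBad: Injection\r\n')

    try:
        msg.as_string()          # the generator now rejects the unsafe folded header
    except errors.HeaderWriteError as exc:
        print('refused to write header:', exc)

With verify_generated_headers=False the same message serializes with the injected header, which is what the new test_policy case demonstrates.
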
Co-authored-by: Bas Bloemsaat Co-authored-by: Serhiy Storchaka --- Doc/library/email.errors.rst | 7 +++ Doc/library/email.policy.rst | 18 ++++++ Doc/whatsnew/3.13.rst | 9 +++ Lib/email/_header_value_parser.py | 12 +++- Lib/email/_policybase.py | 8 +++ Lib/email/errors.py | 4 ++ Lib/email/generator.py | 13 +++- Lib/test/test_email/test_generator.py | 62 +++++++++++++++++++ Lib/test/test_email/test_policy.py | 26 ++++++++ ...-07-27-16-10-41.gh-issue-121650.nf6oc9.rst | 5 ++ 10 files changed, 160 insertions(+), 4 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst index 33ab4265116178..f8f43d82a3df2e 100644 --- a/Doc/library/email.errors.rst +++ b/Doc/library/email.errors.rst @@ -58,6 +58,13 @@ The following exception classes are defined in the :mod:`email.errors` module: :class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g. :class:`~email.mime.image.MIMEImage`). + +.. exception:: HeaderWriteError() + + Raised when an error occurs when the :mod:`~email.generator` outputs + headers. + + .. exception:: MessageDefect() This is the base class for all defects found when parsing email messages. diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst index 83feedf728351e..314767d0802a08 100644 --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -229,6 +229,24 @@ added matters. To illustrate:: .. versionadded:: 3.6 + + .. attribute:: verify_generated_headers + + If ``True`` (the default), the generator will raise + :exc:`~email.errors.HeaderWriteError` instead of writing a header + that is improperly folded or delimited, such that it would + be parsed as multiple headers or joined with adjacent data. + Such headers can be generated by custom header classes or bugs + in the ``email`` module. + + As it's a security feature, this defaults to ``True`` even in the + :class:`~email.policy.Compat32` policy. + For backwards compatible, but unsafe, behavior, it must be set to + ``False`` explicitly. + + .. versionadded:: 3.13 + + The following :class:`Policy` method is intended to be called by code using the email library to create policy instances with custom settings: diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index fbf19d1c9598e1..5761712a3c714b 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -736,6 +736,15 @@ doctest email ----- +* Headers with embedded newlines are now quoted on output. + + The :mod:`~email.generator` will now refuse to serialize (write) headers + that are improperly folded or delimited, such that they would be parsed as + multiple headers or joined with adjacent data. + If you need to turn this safety feature off, + set :attr:`~email.policy.Policy.verify_generated_headers`. + (Contributed by Bas Bloemsaat and Petr Viktorin in :gh:`121650`.) + * :func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now return ``('', '')`` 2-tuples in more situations where invalid email addresses are encountered instead of potentially inaccurate values. 
Add optional *strict* diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index 7da1bbaf8a80d7..ec2215a5e5f33c 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -92,6 +92,8 @@ ASPECIALS = TSPECIALS | set("*'%") ATTRIBUTE_ENDS = ASPECIALS | WSP EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%') +NLSET = {'\n', '\r'} +SPECIALSNL = SPECIALS | NLSET def quote_string(value): return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"' @@ -2802,9 +2804,13 @@ def _refold_parse_tree(parse_tree, *, policy): wrap_as_ew_blocked -= 1 continue tstr = str(part) - if part.token_type == 'ptext' and set(tstr) & SPECIALS: - # Encode if tstr contains special characters. - want_encoding = True + if not want_encoding: + if part.token_type == 'ptext': + # Encode if tstr contains special characters. + want_encoding = not SPECIALSNL.isdisjoint(tstr) + else: + # Encode if tstr contains newlines. + want_encoding = not NLSET.isdisjoint(tstr) try: tstr.encode(encoding) charset = encoding diff --git a/Lib/email/_policybase.py b/Lib/email/_policybase.py index 1c76ed63b61ae8..c7694a44e26639 100644 --- a/Lib/email/_policybase.py +++ b/Lib/email/_policybase.py @@ -157,6 +157,13 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta): message_factory -- the class to use to create new message objects. If the value is None, the default is Message. + verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. """ raise_on_defect = False @@ -165,6 +172,7 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta): max_line_length = 78 mangle_from_ = False message_factory = None + verify_generated_headers = True def handle_defect(self, obj, defect): """Based on policy, either raise defect or call register_defect. diff --git a/Lib/email/errors.py b/Lib/email/errors.py index 3ad00565549968..02aa5eced6ae46 100644 --- a/Lib/email/errors.py +++ b/Lib/email/errors.py @@ -29,6 +29,10 @@ class CharsetError(MessageError): """An illegal charset was given.""" +class HeaderWriteError(MessageError): + """Error while writing headers.""" + + # These are parsing defects which the parser was able to work around. class MessageDefect(ValueError): """Base class for a message defect.""" diff --git a/Lib/email/generator.py b/Lib/email/generator.py index 9d058ceada24f8..42c84aa4da1044 100644 --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -14,12 +14,14 @@ from copy import deepcopy from io import StringIO, BytesIO from email.utils import _has_surrogates +from email.errors import HeaderWriteError UNDERSCORE = '_' NL = '\n' # XXX: no longer used by the code below. 
NLCRE = re.compile(r'\r\n|\r|\n') fcre = re.compile(r'^From ', re.MULTILINE) +NEWLINE_WITHOUT_FWSP = re.compile(r'\r\n[^ \t]|\r[^ \n\t]|\n[^ \t]') class Generator: @@ -222,7 +224,16 @@ def _dispatch(self, msg): def _write_headers(self, msg): for h, v in msg.raw_items(): - self.write(self.policy.fold(h, v)) + folded = self.policy.fold(h, v) + if self.policy.verify_generated_headers: + linesep = self.policy.linesep + if not folded.endswith(self.policy.linesep): + raise HeaderWriteError( + f'folded header does not end with {linesep!r}: {folded!r}') + if NEWLINE_WITHOUT_FWSP.search(folded.removesuffix(linesep)): + raise HeaderWriteError( + f'folded header contains newline: {folded!r}') + self.write(folded) # A blank line always separates headers from body self.write(self._NL) diff --git a/Lib/test/test_email/test_generator.py b/Lib/test/test_email/test_generator.py index bc6f734d4fd0a9..c75a842c33578e 100644 --- a/Lib/test/test_email/test_generator.py +++ b/Lib/test/test_email/test_generator.py @@ -6,6 +6,7 @@ from email.generator import Generator, BytesGenerator from email.headerregistry import Address from email import policy +import email.errors from test.test_email import TestEmailBase, parameterize @@ -249,6 +250,44 @@ def test_rfc2231_wrapping_switches_to_default_len_if_too_narrow(self): g.flatten(msg) self.assertEqual(s.getvalue(), self.typ(expected)) + def test_keep_encoded_newlines(self): + msg = self.msgmaker(self.typ(textwrap.dedent("""\ + To: nobody + Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com + + None + """))) + expected = textwrap.dedent("""\ + To: nobody + Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com + + None + """) + s = self.ioclass() + g = self.genclass(s, policy=self.policy.clone(max_line_length=80)) + g.flatten(msg) + self.assertEqual(s.getvalue(), self.typ(expected)) + + def test_keep_long_encoded_newlines(self): + msg = self.msgmaker(self.typ(textwrap.dedent("""\ + To: nobody + Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com + + None + """))) + expected = textwrap.dedent("""\ + To: nobody + Subject: Bad subject + =?utf-8?q?=0A?=Bcc: + injection@example.com + + None + """) + s = self.ioclass() + g = self.genclass(s, policy=self.policy.clone(max_line_length=30)) + g.flatten(msg) + self.assertEqual(s.getvalue(), self.typ(expected)) + class TestGenerator(TestGeneratorBase, TestEmailBase): @@ -273,6 +312,29 @@ def test_flatten_unicode_linesep(self): g.flatten(msg) self.assertEqual(s.getvalue(), self.typ(expected)) + def test_verify_generated_headers(self): + """gh-121650: by default the generator prevents header injection""" + class LiteralHeader(str): + name = 'Header' + def fold(self, **kwargs): + return self + + for text in ( + 'Value\r\nBad Injection\r\n', + 'NoNewLine' + ): + with self.subTest(text=text): + message = message_from_string( + "Header: Value\r\n\r\nBody", + policy=self.policy, + ) + + del message['Header'] + message['Header'] = LiteralHeader(text) + + with self.assertRaises(email.errors.HeaderWriteError): + message.as_string() + class TestBytesGenerator(TestGeneratorBase, TestEmailBase): diff --git a/Lib/test/test_email/test_policy.py b/Lib/test/test_email/test_policy.py index c6b9c80efe1b54..baa35fd68e49c5 100644 --- a/Lib/test/test_email/test_policy.py +++ b/Lib/test/test_email/test_policy.py @@ -26,6 +26,7 @@ class PolicyAPITests(unittest.TestCase): 'raise_on_defect': False, 'mangle_from_': True, 'message_factory': None, + 'verify_generated_headers': True, } # These default values are the ones set on 
email.policy.default. # If any of these defaults change, the docs must be updated. @@ -294,6 +295,31 @@ def test_short_maxlen_error(self): with self.assertRaises(email.errors.HeaderParseError): policy.fold("Subject", subject) + def test_verify_generated_headers(self): + """Turning protection off allows header injection""" + policy = email.policy.default.clone(verify_generated_headers=False) + for text in ( + 'Header: Value\r\nBad: Injection\r\n', + 'Header: NoNewLine' + ): + with self.subTest(text=text): + message = email.message_from_string( + "Header: Value\r\n\r\nBody", + policy=policy, + ) + class LiteralHeader(str): + name = 'Header' + def fold(self, **kwargs): + return self + + del message['Header'] + message['Header'] = LiteralHeader(text) + + self.assertEqual( + message.as_string(), + f"{text}\nBody", + ) + # XXX: Need subclassing tests. # For adding subclassed objects, make sure the usual rules apply (subclass # wins), but that the order still works (right overrides left). diff --git a/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst b/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst new file mode 100644 index 00000000000000..83dd28d4ac575b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst @@ -0,0 +1,5 @@ +:mod:`email` headers with embedded newlines are now quoted on output. The +:mod:`~email.generator` will now refuse to serialize (write) headers that +are unsafely folded or delimited; see +:attr:`~email.policy.Policy.verify_generated_headers`. (Contributed by Bas +Bloemsaat and Petr Viktorin in :gh:`121650`.) From 29c04dfa2718dd25ad8b381a1027045b312f9739 Mon Sep 17 00:00:00 2001 From: Terry Jan Reedy Date: Tue, 30 Jul 2024 18:29:52 -0400 Subject: [PATCH 32/70] GH-122482: Make About IDLE direct discussion to DPO (#122483) Currently, idle-dev@python.org and idle-dev mailing list serve to collect spam (90+%). Change About IDLE to direct discussions to discuss.python.org. Users are already doing so. --- Lib/idlelib/News3.txt | 3 +++ Lib/idlelib/help_about.py | 15 ++++++++------- ...2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst | 2 ++ 3 files changed, 13 insertions(+), 7 deletions(-) create mode 100644 Misc/NEWS.d/next/IDLE/2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt index 68702ac8fb9157..a7a92e97b6c244 100644 --- a/Lib/idlelib/News3.txt +++ b/Lib/idlelib/News3.txt @@ -4,6 +4,9 @@ Released on 2024-10-xx ========================= +gh-122482: Change About IDLE to direct users to discuss.python.org +instead of the now unused idle-dev email and mailing list. + gh-78889: Stop Shell freezes by blocking user access to non-method sys.stdout.shell attributes, which are all private. 
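The help_about.py hunks below replace the static idle-dev address with clickable labels. The underlying pattern, sketched here as an illustrative standalone window rather than IDLE's actual dialog (widget names, colors and geometry are assumptions), is a Tkinter ``Label`` bound on left click (``<Button-1>``) to ``webbrowser.open``::

    import webbrowser
    from tkinter import Tk, Label

    root = Tk()
    forums_url = "https://discuss.python.org"
    # A label styled like a link; clicking it opens the forum in the default browser.
    forums = Label(root, text="Python forums: " + forums_url,
                   fg="blue", cursor="hand2")
    forums.pack(anchor="w", padx=10, pady=5)
    forums.bind("<Button-1>", lambda event: webbrowser.open(forums_url))
    root.mainloop()
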
diff --git a/Lib/idlelib/help_about.py b/Lib/idlelib/help_about.py index aa1c352897f9e7..81c65f6264e7b9 100644 --- a/Lib/idlelib/help_about.py +++ b/Lib/idlelib/help_about.py @@ -85,15 +85,18 @@ def create_widgets(self): byline = Label(frame_background, text=byline_text, justify=LEFT, fg=self.fg, bg=self.bg) byline.grid(row=2, column=0, sticky=W, columnspan=3, padx=10, pady=5) - email = Label(frame_background, text='email: idle-dev@python.org', - justify=LEFT, fg=self.fg, bg=self.bg) - email.grid(row=6, column=0, columnspan=2, sticky=W, padx=10, pady=0) + + forums_url = "https://discuss.python.org" + forums = Label(frame_background, text="Python forums: "+forums_url, + justify=LEFT, fg=self.fg, bg=self.bg) + forums.grid(row=6, column=0, sticky=W, padx=10, pady=0) + forums.bind("", lambda event: webbrowser.open(forums_url)) docs_url = ("https://docs.python.org/%d.%d/library/idle.html" % sys.version_info[:2]) docs = Label(frame_background, text=docs_url, justify=LEFT, fg=self.fg, bg=self.bg) docs.grid(row=7, column=0, columnspan=2, sticky=W, padx=10, pady=0) - docs.bind("", lambda event: webbrowser.open(docs['text'])) + docs.bind("", lambda event: webbrowser.open(docs_url)) Frame(frame_background, borderwidth=1, relief=SUNKEN, height=2, bg=self.bg).grid(row=8, column=0, sticky=EW, @@ -123,9 +126,7 @@ def create_widgets(self): height=2, bg=self.bg).grid(row=11, column=0, sticky=EW, columnspan=3, padx=5, pady=5) - idle = Label(frame_background, - text='IDLE', - fg=self.fg, bg=self.bg) + idle = Label(frame_background, text='IDLE', fg=self.fg, bg=self.bg) idle.grid(row=12, column=0, sticky=W, padx=10, pady=0) idle_buttons = Frame(frame_background, bg=self.bg) idle_buttons.grid(row=13, column=0, columnspan=3, sticky=NSEW) diff --git a/Misc/NEWS.d/next/IDLE/2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst b/Misc/NEWS.d/next/IDLE/2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst new file mode 100644 index 00000000000000..8a11e73305992f --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst @@ -0,0 +1,2 @@ +Change About IDLE to direct users to discuss.python.org instead of the now +unused idle-dev email and mailing list. From 82db5728136ebec3a1d221570b810b4128a21255 Mon Sep 17 00:00:00 2001 From: Malcolm Smith Date: Wed, 31 Jul 2024 01:21:43 +0100 Subject: [PATCH 33/70] gh-116622: Fix testPyObjectPrintOSError on Android (#122487) Adds extra handling for way BSD/Android return errors from calls to fwrite. --- Android/android.py | 25 ++++++++++++++----- Android/testbed/app/build.gradle.kts | 9 ++++++- ...-07-30-23-48-26.gh-issue-116622.yTTtil.rst | 3 +++ Objects/object.c | 11 ++++++-- 4 files changed, 39 insertions(+), 9 deletions(-) create mode 100644 Misc/NEWS.d/next/C API/2024-07-30-23-48-26.gh-issue-116622.yTTtil.rst diff --git a/Android/android.py b/Android/android.py index 0a1393e61ddb0e..a78b15c9c4e58c 100755 --- a/Android/android.py +++ b/Android/android.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import argparse +from glob import glob import os import re import shutil @@ -16,16 +17,21 @@ CROSS_BUILD_DIR = CHECKOUT / "cross-build" -def delete_if_exists(path): - if path.exists(): +def delete_glob(pattern): + # Path.glob doesn't accept non-relative patterns. 
+ for path in glob(str(pattern)): + path = Path(path) print(f"Deleting {path} ...") - shutil.rmtree(path) + if path.is_dir() and not path.is_symlink(): + shutil.rmtree(path) + else: + path.unlink() def subdir(name, *, clean=None): path = CROSS_BUILD_DIR / name if clean: - delete_if_exists(path) + delete_glob(path) if not path.exists(): if clean is None: sys.exit( @@ -150,10 +156,17 @@ def configure_host_python(context): def make_host_python(context): + # The CFLAGS and LDFLAGS set in android-env include the prefix dir, so + # delete any previously-installed Python libs and include files to prevent + # them being used during the build. host_dir = subdir(context.host) + prefix_dir = host_dir / "prefix" + delete_glob(f"{prefix_dir}/include/python*") + delete_glob(f"{prefix_dir}/lib/libpython*") + os.chdir(host_dir / "build") run(["make", "-j", str(os.cpu_count())], host=context.host) - run(["make", "install", f"prefix={host_dir}/prefix"], host=context.host) + run(["make", "install", f"prefix={prefix_dir}"], host=context.host) def build_all(context): @@ -164,7 +177,7 @@ def build_all(context): def clean_all(context): - delete_if_exists(CROSS_BUILD_DIR) + delete_glob(CROSS_BUILD_DIR) # To avoid distributing compiled artifacts without corresponding source code, diff --git a/Android/testbed/app/build.gradle.kts b/Android/testbed/app/build.gradle.kts index 7690d3fd86b2fd..7320b21e98bbd1 100644 --- a/Android/testbed/app/build.gradle.kts +++ b/Android/testbed/app/build.gradle.kts @@ -7,10 +7,17 @@ plugins { val PYTHON_DIR = File(projectDir, "../../..").canonicalPath val PYTHON_CROSS_DIR = "$PYTHON_DIR/cross-build" + val ABIS = mapOf( "arm64-v8a" to "aarch64-linux-android", "x86_64" to "x86_64-linux-android", -) +).filter { File("$PYTHON_CROSS_DIR/${it.value}").exists() } +if (ABIS.isEmpty()) { + throw GradleException( + "No Android ABIs found in $PYTHON_CROSS_DIR: see Android/README.md " + + "for building instructions." + ) +} val PYTHON_VERSION = File("$PYTHON_DIR/Include/patchlevel.h").useLines { for (line in it) { diff --git a/Misc/NEWS.d/next/C API/2024-07-30-23-48-26.gh-issue-116622.yTTtil.rst b/Misc/NEWS.d/next/C API/2024-07-30-23-48-26.gh-issue-116622.yTTtil.rst new file mode 100644 index 00000000000000..7ae0f83f37bd62 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2024-07-30-23-48-26.gh-issue-116622.yTTtil.rst @@ -0,0 +1,3 @@ +Make :any:`PyObject_Print` work around a bug in Android and OpenBSD which +prevented it from throwing an exception when trying to write to a read-only +stream. diff --git a/Objects/object.c b/Objects/object.c index 8a648a46487910..db9d3e46795668 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -536,6 +536,7 @@ int PyObject_Print(PyObject *op, FILE *fp, int flags) { int ret = 0; + int write_error = 0; if (PyErr_CheckSignals()) return -1; #ifdef USE_STACKCHECK @@ -574,14 +575,20 @@ PyObject_Print(PyObject *op, FILE *fp, int flags) ret = -1; } else { - fwrite(t, 1, len, fp); + /* Versions of Android and OpenBSD from before 2023 fail to + set the `ferror` indicator when writing to a read-only + stream, so we need to check the return value. 
+ (https://github.com/openbsd/src/commit/fc99cf9338942ecd9adc94ea08bf6188f0428c15) */ + if (fwrite(t, 1, len, fp) != (size_t)len) { + write_error = 1; + } } Py_DECREF(s); } } } if (ret == 0) { - if (ferror(fp)) { + if (write_error || ferror(fp)) { PyErr_SetFromErrno(PyExc_OSError); clearerr(fp); ret = -1; From a9344cdffa30fdf60154d645f9e74ab3d67ae2e9 Mon Sep 17 00:00:00 2001 From: Cody Maloney Date: Tue, 30 Jul 2024 18:39:54 -0700 Subject: [PATCH 34/70] gh-121381 Remove subprocess._USE_VFORK escape hatch (#121383) This flag was added as an escape hatch in gh-91401 and backported to Python 3.10. The flag broke at some point between its addition and now. As there is currently no publicly known environments that require this, remove it rather than work on fixing it. This leaves the flag in the subprocess module to not break code which may have used / checked the flag itself. discussion: https://discuss.python.org/t/subprocess-use-vfork-escape-hatch-broken-fix-or-remove/56915/2 --- Doc/library/subprocess.rst | 25 ++++--------------- Lib/multiprocessing/util.py | 3 +-- Lib/subprocess.py | 3 +-- Lib/test/test_capi/test_misc.py | 6 ++--- Lib/test/test_subprocess.py | 22 +--------------- ...-06-28-23-17-22.gh-issue-121381.i2xL7P.rst | 2 ++ Modules/_posixsubprocess.c | 7 +++--- Modules/clinic/_posixsubprocess.c.h | 15 ++++------- 8 files changed, 21 insertions(+), 62 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-06-28-23-17-22.gh-issue-121381.i2xL7P.rst diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index a0ba97f429bec9..f10a8085e64244 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -1561,36 +1561,22 @@ runtime): Module which provides function to parse and escape command lines. -.. _disable_vfork: .. _disable_posix_spawn: -Disabling use of ``vfork()`` or ``posix_spawn()`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Disable use of ``posix_spawn()`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ On Linux, :mod:`subprocess` defaults to using the ``vfork()`` system call internally when it is safe to do so rather than ``fork()``. This greatly improves performance. -If you ever encounter a presumed highly unusual situation where you need to -prevent ``vfork()`` from being used by Python, you can set the -:const:`subprocess._USE_VFORK` attribute to a false value. - -:: - - subprocess._USE_VFORK = False # See CPython issue gh-NNNNNN. - -Setting this has no impact on use of ``posix_spawn()`` which could use -``vfork()`` internally within its libc implementation. There is a similar -:const:`subprocess._USE_POSIX_SPAWN` attribute if you need to prevent use of -that. - :: subprocess._USE_POSIX_SPAWN = False # See CPython issue gh-NNNNNN. -It is safe to set these to false on any Python version. They will have no -effect on older versions when unsupported. Do not assume the attributes are -available to read. Despite their names, a true value does not indicate that the +It is safe to set this to false on any Python version. It will have no +effect on older or newer versions where unsupported. Do not assume the attribute +is available to read. Despite the name, a true value does not indicate the corresponding function will be used, only that it may be. Please file issues any time you have to use these private knobs with a way to @@ -1598,4 +1584,3 @@ reproduce the issue you were seeing. Link to that issue from a comment in your code. .. versionadded:: 3.8 ``_USE_POSIX_SPAWN`` -.. 
versionadded:: 3.11 ``_USE_VFORK`` diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py index 4f471fbde71ace..d48ef8a86b34e1 100644 --- a/Lib/multiprocessing/util.py +++ b/Lib/multiprocessing/util.py @@ -445,8 +445,7 @@ def spawnv_passfds(path, args, passfds): return _posixsubprocess.fork_exec( args, [path], True, passfds, None, None, -1, -1, -1, -1, -1, -1, errpipe_read, errpipe_write, - False, False, -1, None, None, None, -1, None, - subprocess._USE_VFORK) + False, False, -1, None, None, None, -1, None) finally: os.close(errpipe_read) os.close(errpipe_write) diff --git a/Lib/subprocess.py b/Lib/subprocess.py index bc08878db313df..88f0230b05fbc7 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -749,7 +749,6 @@ def _use_posix_spawn(): # These are primarily fail-safe knobs for negatives. A True value does not # guarantee the given libc/syscall API will be used. _USE_POSIX_SPAWN = _use_posix_spawn() -_USE_VFORK = True _HAVE_POSIX_SPAWN_CLOSEFROM = hasattr(os, 'POSIX_SPAWN_CLOSEFROM') @@ -1902,7 +1901,7 @@ def _execute_child(self, args, executable, preexec_fn, close_fds, errpipe_read, errpipe_write, restore_signals, start_new_session, process_group, gid, gids, uid, umask, - preexec_fn, _USE_VFORK) + preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index 9de97c0c2c776a..5c4547da1bdc53 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -120,7 +120,7 @@ def __len__(self): return 1 with self.assertRaisesRegex(TypeError, 'indexing'): _posixsubprocess.fork_exec( - 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False) + 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22) # Issue #15736: overflow in _PySequence_BytesToCharpArray() class Z(object): def __len__(self): @@ -128,7 +128,7 @@ def __len__(self): def __getitem__(self, i): return b'x' self.assertRaises(MemoryError, _posixsubprocess.fork_exec, - 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False) + 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22) @unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.') def test_subprocess_fork_exec(self): @@ -138,7 +138,7 @@ def __len__(self): # Issue #15738: crash in subprocess_fork_exec() self.assertRaises(TypeError, _posixsubprocess.fork_exec, - Z(),[b'1'],True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False) + Z(),[b'1'],True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22) @unittest.skipIf(MISSING_C_DOCSTRINGS, "Signature information for builtins requires docstrings") diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 9412a2d737bb2e..f065b9c9bb1c2c 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -3278,7 +3278,7 @@ def __int__(self): 1, 2, 3, 4, True, True, 0, None, None, None, -1, - None, True) + None) self.assertIn('fds_to_keep', str(c.exception)) finally: if not gc_enabled: @@ -3413,25 +3413,6 @@ def __del__(self): self.assertEqual(out.strip(), b"OK") self.assertIn(b"preexec_fn not supported at interpreter shutdown", err) - @unittest.skipIf(not sysconfig.get_config_var("HAVE_VFORK"), - "vfork() not enabled by configure.") - @mock.patch("subprocess._fork_exec") - @mock.patch("subprocess._USE_POSIX_SPAWN", new=False) - def test__use_vfork(self, mock_fork_exec): - self.assertTrue(subprocess._USE_VFORK) # The 
default value regardless. - mock_fork_exec.side_effect = RuntimeError("just testing args") - with self.assertRaises(RuntimeError): - subprocess.run([sys.executable, "-c", "pass"]) - mock_fork_exec.assert_called_once() - # NOTE: These assertions are *ugly* as they require the last arg - # to remain the have_vfork boolean. We really need to refactor away - # from the giant "wall of args" internal C extension API. - self.assertTrue(mock_fork_exec.call_args.args[-1]) - with mock.patch.object(subprocess, '_USE_VFORK', False): - with self.assertRaises(RuntimeError): - subprocess.run([sys.executable, "-c", "pass"]) - self.assertFalse(mock_fork_exec.call_args_list[-1].args[-1]) - @unittest.skipIf(not sysconfig.get_config_var("HAVE_VFORK"), "vfork() not enabled by configure.") @unittest.skipIf(sys.platform != "linux", "Linux only, requires strace.") @@ -3478,7 +3459,6 @@ def test_vfork_used_when_expected(self): # Test that each individual thing that would disable the use of vfork # actually disables it. for sub_name, preamble, sp_kwarg, expect_permission_error in ( - ("!use_vfork", "subprocess._USE_VFORK = False", "", False), ("preexec", "", "preexec_fn=lambda: None", False), ("setgid", "", f"group={os.getgid()}", True), ("setuid", "", f"user={os.getuid()}", True), diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-28-23-17-22.gh-issue-121381.i2xL7P.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-28-23-17-22.gh-issue-121381.i2xL7P.rst new file mode 100644 index 00000000000000..3a02145378e2cd --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-06-28-23-17-22.gh-issue-121381.i2xL7P.rst @@ -0,0 +1,2 @@ +Remove ``subprocess._USE_VFORK`` escape hatch code and documentation. +It was added just in case, and doesn't have any known cases that require it. diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c index daec4ad708dea4..ad6d7ceda84e37 100644 --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -977,7 +977,6 @@ _posixsubprocess.fork_exec as subprocess_fork_exec uid as uid_object: object child_umask: int preexec_fn: object - allow_vfork: bool / Spawn a fresh new child process. @@ -1014,8 +1013,8 @@ subprocess_fork_exec_impl(PyObject *module, PyObject *process_args, pid_t pgid_to_set, PyObject *gid_object, PyObject *extra_groups_packed, PyObject *uid_object, int child_umask, - PyObject *preexec_fn, int allow_vfork) -/*[clinic end generated code: output=7ee4f6ee5cf22b5b input=51757287ef266ffa]*/ + PyObject *preexec_fn) +/*[clinic end generated code: output=288464dc56e373c7 input=f311c3bcb5dd55c8]*/ { PyObject *converted_args = NULL, *fast_args = NULL; PyObject *preexec_fn_args_tuple = NULL; @@ -1218,7 +1217,7 @@ subprocess_fork_exec_impl(PyObject *module, PyObject *process_args, #ifdef VFORK_USABLE /* Use vfork() only if it's safe. See the comment above child_exec(). */ sigset_t old_sigs; - if (preexec_fn == Py_None && allow_vfork && + if (preexec_fn == Py_None && uid == (uid_t)-1 && gid == (gid_t)-1 && extra_group_size < 0) { /* Block all signals to ensure that no signal handlers are run in the * child process while it shares memory with us. 
Note that signals diff --git a/Modules/clinic/_posixsubprocess.c.h b/Modules/clinic/_posixsubprocess.c.h index dd7644de6b7534..d52629cf6eaa5b 100644 --- a/Modules/clinic/_posixsubprocess.c.h +++ b/Modules/clinic/_posixsubprocess.c.h @@ -9,7 +9,7 @@ PyDoc_STRVAR(subprocess_fork_exec__doc__, " env, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite,\n" " errpipe_read, errpipe_write, restore_signals, call_setsid,\n" " pgid_to_set, gid, extra_groups, uid, child_umask, preexec_fn,\n" -" allow_vfork, /)\n" +" /)\n" "--\n" "\n" "Spawn a fresh new child process.\n" @@ -48,7 +48,7 @@ subprocess_fork_exec_impl(PyObject *module, PyObject *process_args, pid_t pgid_to_set, PyObject *gid_object, PyObject *extra_groups_packed, PyObject *uid_object, int child_umask, - PyObject *preexec_fn, int allow_vfork); + PyObject *preexec_fn); static PyObject * subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs) @@ -76,9 +76,8 @@ subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs) PyObject *uid_object; int child_umask; PyObject *preexec_fn; - int allow_vfork; - if (!_PyArg_CheckPositional("fork_exec", nargs, 23, 23)) { + if (!_PyArg_CheckPositional("fork_exec", nargs, 22, 22)) { goto exit; } process_args = args[0]; @@ -146,13 +145,9 @@ subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs) goto exit; } preexec_fn = args[21]; - allow_vfork = PyObject_IsTrue(args[22]); - if (allow_vfork < 0) { - goto exit; - } - return_value = subprocess_fork_exec_impl(module, process_args, executable_list, close_fds, py_fds_to_keep, cwd_obj, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, call_setsid, pgid_to_set, gid_object, extra_groups_packed, uid_object, child_umask, preexec_fn, allow_vfork); + return_value = subprocess_fork_exec_impl(module, process_args, executable_list, close_fds, py_fds_to_keep, cwd_obj, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, call_setsid, pgid_to_set, gid_object, extra_groups_packed, uid_object, child_umask, preexec_fn); exit: return return_value; } -/*[clinic end generated code: output=48555f5965a871be input=a9049054013a1b77]*/ +/*[clinic end generated code: output=942bc2748a9c2785 input=a9049054013a1b77]*/ From d01fd240517e0c5fb679686a93119d0aa6b0fc0f Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Wed, 31 Jul 2024 10:02:08 +0300 Subject: [PATCH 35/70] Docs: bump Sphinx to 8.0 and update constraints (#122496) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- Doc/constraints.txt | 14 +++++++------- Doc/requirements.txt | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Doc/constraints.txt b/Doc/constraints.txt index 147de1271eb2b7..ab3b39bf380dad 100644 --- a/Doc/constraints.txt +++ b/Doc/constraints.txt @@ -9,16 +9,16 @@ babel<3 colorama<0.5 imagesize<1.5 Jinja2<3.2 -packaging<24 -Pygments>=2.16.1,<3 +packaging<25 +Pygments<3 requests<3 snowballstemmer<3 -sphinxcontrib-applehelp<1.1 -sphinxcontrib-devhelp<1.1 -sphinxcontrib-htmlhelp<2.1 +sphinxcontrib-applehelp<2.1 +sphinxcontrib-devhelp<2.1 +sphinxcontrib-htmlhelp<2.2 sphinxcontrib-jsmath<1.1 -sphinxcontrib-qthelp<1.1 -sphinxcontrib-serializinghtml<1.2 +sphinxcontrib-qthelp<2.1 +sphinxcontrib-serializinghtml<2.1 # Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above) MarkupSafe<2.2 diff --git 
a/Doc/requirements.txt b/Doc/requirements.txt index 98ad52e17538a4..bf1028020b7af7 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -6,7 +6,7 @@ # Sphinx version is pinned so that new versions that introduce new warnings # won't suddenly cause build failures. Updating the version is fine as long # as no warnings are raised by doing so. -sphinx~=7.4.0 +sphinx~=8.0.0 blurb From f071f01b7b7e19d7d6b3a4b0ec62f820ecb14660 Mon Sep 17 00:00:00 2001 From: Russell Keith-Magee Date: Wed, 31 Jul 2024 16:24:15 +0800 Subject: [PATCH 36/70] gh-122133: Rework pure Python socketpair tests to avoid use of importlib.reload. (#122493) Co-authored-by: Gregory P. Smith --- Lib/socket.py | 121 +++++++++++++++++++--------------------- Lib/test/test_socket.py | 20 ++----- 2 files changed, 64 insertions(+), 77 deletions(-) diff --git a/Lib/socket.py b/Lib/socket.py index 2e6043cbdb8005..9207101dcf9d58 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -592,16 +592,65 @@ def fromshare(info): return socket(0, 0, 0, info) __all__.append("fromshare") -if hasattr(_socket, "socketpair"): +# Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain. +# This is used if _socket doesn't natively provide socketpair. It's +# always defined so that it can be patched in for testing purposes. +def _fallback_socketpair(family=AF_INET, type=SOCK_STREAM, proto=0): + if family == AF_INET: + host = _LOCALHOST + elif family == AF_INET6: + host = _LOCALHOST_V6 + else: + raise ValueError("Only AF_INET and AF_INET6 socket address families " + "are supported") + if type != SOCK_STREAM: + raise ValueError("Only SOCK_STREAM socket type is supported") + if proto != 0: + raise ValueError("Only protocol zero is supported") + + # We create a connected TCP socket. Note the trick with + # setblocking(False) that prevents us from having to create a thread. + lsock = socket(family, type, proto) + try: + lsock.bind((host, 0)) + lsock.listen() + # On IPv6, ignore flow_info and scope_id + addr, port = lsock.getsockname()[:2] + csock = socket(family, type, proto) + try: + csock.setblocking(False) + try: + csock.connect((addr, port)) + except (BlockingIOError, InterruptedError): + pass + csock.setblocking(True) + ssock, _ = lsock.accept() + except: + csock.close() + raise + finally: + lsock.close() - def socketpair(family=None, type=SOCK_STREAM, proto=0): - """socketpair([family[, type[, proto]]]) -> (socket object, socket object) + # Authenticating avoids using a connection from something else + # able to connect to {host}:{port} instead of us. + # We expect only AF_INET and AF_INET6 families. + try: + if ( + ssock.getsockname() != csock.getpeername() + or csock.getsockname() != ssock.getpeername() + ): + raise ConnectionError("Unexpected peer connection") + except: + # getsockname() and getpeername() can fail + # if either socket isn't connected. + ssock.close() + csock.close() + raise - Create a pair of socket objects from the sockets returned by the platform - socketpair() function. - The arguments are the same as for socket() except the default family is - AF_UNIX if defined on the platform; otherwise, the default is AF_INET. - """ + return (ssock, csock) + +if hasattr(_socket, "socketpair"): + def socketpair(family=None, type=SOCK_STREAM, proto=0): if family is None: try: family = AF_UNIX @@ -613,61 +662,7 @@ def socketpair(family=None, type=SOCK_STREAM, proto=0): return a, b else: - - # Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain. 
- def socketpair(family=AF_INET, type=SOCK_STREAM, proto=0): - if family == AF_INET: - host = _LOCALHOST - elif family == AF_INET6: - host = _LOCALHOST_V6 - else: - raise ValueError("Only AF_INET and AF_INET6 socket address families " - "are supported") - if type != SOCK_STREAM: - raise ValueError("Only SOCK_STREAM socket type is supported") - if proto != 0: - raise ValueError("Only protocol zero is supported") - - # We create a connected TCP socket. Note the trick with - # setblocking(False) that prevents us from having to create a thread. - lsock = socket(family, type, proto) - try: - lsock.bind((host, 0)) - lsock.listen() - # On IPv6, ignore flow_info and scope_id - addr, port = lsock.getsockname()[:2] - csock = socket(family, type, proto) - try: - csock.setblocking(False) - try: - csock.connect((addr, port)) - except (BlockingIOError, InterruptedError): - pass - csock.setblocking(True) - ssock, _ = lsock.accept() - except: - csock.close() - raise - finally: - lsock.close() - - # Authenticating avoids using a connection from something else - # able to connect to {host}:{port} instead of us. - # We expect only AF_INET and AF_INET6 families. - try: - if ( - ssock.getsockname() != csock.getpeername() - or csock.getsockname() != ssock.getpeername() - ): - raise ConnectionError("Unexpected peer connection") - except: - # getsockname() and getpeername() can fail - # if either socket isn't connected. - ssock.close() - csock.close() - raise - - return (ssock, csock) + socketpair = _fallback_socketpair __all__.append("socketpair") socketpair.__doc__ = """socketpair([family[, type[, proto]]]) -> (socket object, socket object) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index bb65c3c5993b88..7c607a809aa428 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -4861,7 +4861,6 @@ def _testSend(self): class PurePythonSocketPairTest(SocketPairTest): - # Explicitly use socketpair AF_INET or AF_INET6 to ensure that is the # code path we're using regardless platform is the pure python one where # `_socket.socketpair` does not exist. (AF_INET does not work with @@ -4876,28 +4875,21 @@ def socketpair(self): # Local imports in this class make for easy security fix backporting. def setUp(self): - import _socket - self._orig_sp = getattr(_socket, 'socketpair', None) - if self._orig_sp is not None: + if hasattr(_socket, "socketpair"): + self._orig_sp = socket.socketpair # This forces the version using the non-OS provided socketpair # emulation via an AF_INET socket in Lib/socket.py. - del _socket.socketpair - import importlib - global socket - socket = importlib.reload(socket) + socket.socketpair = socket._fallback_socketpair else: - pass # This platform already uses the non-OS provided version. + # This platform already uses the non-OS provided version. + self._orig_sp = None super().setUp() def tearDown(self): super().tearDown() - import _socket if self._orig_sp is not None: # Restore the default socket.socketpair definition. 
- _socket.socketpair = self._orig_sp - import importlib - global socket - socket = importlib.reload(socket) + socket.socketpair = self._orig_sp def test_recv(self): msg = self.serv.recv(1024) From e60ee11cb51b87deeb22ad125717bd0d0dc10fa8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Wed, 31 Jul 2024 10:32:16 +0200 Subject: [PATCH 37/70] Move change detection to separate workflow in CI (#122336) --- .github/workflows/build.yml | 131 ++------------- .../workflows/reusable-change-detection.yml | 150 ++++++++++++++++++ 2 files changed, 163 insertions(+), 118 deletions(-) create mode 100644 .github/workflows/reusable-change-detection.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6568b50c3a6a13..4f3995a020e31b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,8 +1,5 @@ name: Tests -# gh-84728: "paths-ignore" is not used to skip documentation-only PRs, because -# it prevents to mark a job as mandatory. A PR cannot be merged if a job is -# mandatory but not scheduled because of "paths-ignore". on: workflow_dispatch: push: @@ -23,121 +20,19 @@ concurrency: jobs: check_source: - name: 'Check for source changes' - runs-on: ubuntu-latest - timeout-minutes: 10 - outputs: - # Some of the referenced steps set outputs conditionally and there may be - # cases when referencing them evaluates to empty strings. It is nice to - # work with proper booleans so they have to be evaluated through JSON - # conversion in the expressions. However, empty strings used like that - # may trigger all sorts of undefined and hard-to-debug behaviors in - # GitHub Actions CI/CD. To help with this, all of the outputs set here - # that are meant to be used as boolean flags (and not arbitrary strings), - # MUST have fallbacks with default values set. A common pattern would be - # to add ` || false` to all such expressions here, in the output - # definitions. They can then later be safely used through the following - # idiom in job conditionals and other expressions. Here's some examples: - # - # if: fromJSON(needs.check_source.outputs.run-docs) - # - # ${{ - # fromJSON(needs.check_source.outputs.run_tests) - # && 'truthy-branch' - # || 'falsy-branch' - # }} - # - run-docs: ${{ steps.docs-changes.outputs.run-docs || false }} - run-win-msi: ${{ steps.win-msi-changes.outputs.run-win-msi || false }} - run_tests: ${{ steps.check.outputs.run_tests || false }} - run_hypothesis: ${{ steps.check.outputs.run_hypothesis || false }} - run_cifuzz: ${{ steps.check.outputs.run_cifuzz || false }} - config_hash: ${{ steps.config_hash.outputs.hash }} # str - steps: - - uses: actions/checkout@v4 - - name: Check for source changes - id: check - run: | - if [ -z "$GITHUB_BASE_REF" ]; then - echo "run_tests=true" >> $GITHUB_OUTPUT - else - git fetch origin $GITHUB_BASE_REF --depth=1 - # git diff "origin/$GITHUB_BASE_REF..." (3 dots) may be more - # reliable than git diff "origin/$GITHUB_BASE_REF.." (2 dots), - # but it requires to download more commits (this job uses - # "git fetch --depth=1"). - # - # git diff "origin/$GITHUB_BASE_REF..." (3 dots) works with Git - # 2.26, but Git 2.28 is stricter and fails with "no merge base". - # - # git diff "origin/$GITHUB_BASE_REF.." (2 dots) should be enough on - # GitHub, since GitHub starts by merging origin/$GITHUB_BASE_REF - # into the PR branch anyway. 
- # - # https://github.com/python/core-workflow/issues/373 - git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qvE '(\.rst$|^Doc|^Misc|^\.pre-commit-config\.yaml$|\.ruff\.toml$|\.md$|mypy\.ini$)' && echo "run_tests=true" >> $GITHUB_OUTPUT || true - fi - - # Check if we should run hypothesis tests - GIT_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}} - echo $GIT_BRANCH - if $(echo "$GIT_BRANCH" | grep -q -w '3\.\(8\|9\|10\|11\)'); then - echo "Branch too old for hypothesis tests" - echo "run_hypothesis=false" >> $GITHUB_OUTPUT - else - echo "Run hypothesis tests" - echo "run_hypothesis=true" >> $GITHUB_OUTPUT - fi - - # oss-fuzz maintains a configuration for fuzzing the main branch of - # CPython, so CIFuzz should be run only for code that is likely to be - # merged into the main branch; compatibility with older branches may - # be broken. - FUZZ_RELEVANT_FILES='(\.c$|\.h$|\.cpp$|^configure$|^\.github/workflows/build\.yml$|^Modules/_xxtestfuzz)' - if [ "$GITHUB_BASE_REF" = "main" ] && [ "$(git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qE $FUZZ_RELEVANT_FILES; echo $?)" -eq 0 ]; then - # The tests are pretty slow so they are executed only for PRs - # changing relevant files. - echo "Run CIFuzz tests" - echo "run_cifuzz=true" >> $GITHUB_OUTPUT - else - echo "Branch too old for CIFuzz tests; or no C files were changed" - echo "run_cifuzz=false" >> $GITHUB_OUTPUT - fi - - name: Compute hash for config cache key - id: config_hash - run: | - echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> $GITHUB_OUTPUT - - name: Get a list of the changed documentation-related files - if: github.event_name == 'pull_request' - id: changed-docs-files - uses: Ana06/get-changed-files@v2.3.0 - with: - filter: | - Doc/** - Misc/** - .github/workflows/reusable-docs.yml - format: csv # works for paths with spaces - - name: Check for docs changes - if: >- - github.event_name == 'pull_request' - && steps.changed-docs-files.outputs.added_modified_renamed != '' - id: docs-changes - run: | - echo "run-docs=true" >> "${GITHUB_OUTPUT}" - - name: Get a list of the MSI installer-related files - id: changed-win-msi-files - uses: Ana06/get-changed-files@v2.3.0 - with: - filter: | - Tools/msi/** - .github/workflows/reusable-windows-msi.yml - format: csv # works for paths with spaces - - name: Check for changes in MSI installer-related files - if: >- - steps.changed-win-msi-files.outputs.added_modified_renamed != '' - id: win-msi-changes - run: | - echo "run-win-msi=true" >> "${GITHUB_OUTPUT}" + name: Change detection + # To use boolean outputs from this job, parse them as JSON. + # Here's some examples: + # + # if: fromJSON(needs.check_source.outputs.run-docs) + # + # ${{ + # fromJSON(needs.check_source.outputs.run_tests) + # && 'truthy-branch' + # || 'falsy-branch' + # }} + # + uses: ./.github/workflows/reusable-change-detection.yml check-docs: name: Docs diff --git a/.github/workflows/reusable-change-detection.yml b/.github/workflows/reusable-change-detection.yml new file mode 100644 index 00000000000000..25c789d335efc8 --- /dev/null +++ b/.github/workflows/reusable-change-detection.yml @@ -0,0 +1,150 @@ +--- + +name: Change detection + +on: # yamllint disable-line rule:truthy + workflow_call: + outputs: + # Some of the referenced steps set outputs conditionally and there may be + # cases when referencing them evaluates to empty strings. It is nice to + # work with proper booleans so they have to be evaluated through JSON + # conversion in the expressions. 
However, empty strings used like that + # may trigger all sorts of undefined and hard-to-debug behaviors in + # GitHub Actions CI/CD. To help with this, all of the outputs set here + # that are meant to be used as boolean flags (and not arbitrary strings), + # MUST have fallbacks with default values set. A common pattern would be + # to add ` || false` to all such expressions here, in the output + # definitions. They can then later be safely used through the following + # idiom in job conditionals and other expressions. Here's some examples: + # + # if: fromJSON(needs.change-detection.outputs.run-docs) + # + # ${{ + # fromJSON(needs.change-detection.outputs.run-tests) + # && 'truthy-branch' + # || 'falsy-branch' + # }} + # + config_hash: + description: Config hash value for use in cache keys + value: ${{ jobs.compute-changes.outputs.config-hash }} # str + run-docs: + description: Whether to build the docs + value: ${{ jobs.compute-changes.outputs.run-docs || false }} # bool + run_tests: + description: Whether to run the regular tests + value: ${{ jobs.compute-changes.outputs.run-tests || false }} # bool + run-win-msi: + description: Whether to run the MSI installer smoke tests + value: >- # bool + ${{ jobs.compute-changes.outputs.run-win-msi || false }} + run_hypothesis: + description: Whether to run the Hypothesis tests + value: >- # bool + ${{ jobs.compute-changes.outputs.run-hypothesis || false }} + run_cifuzz: + description: Whether to run the CIFuzz job + value: >- # bool + ${{ jobs.compute-changes.outputs.run-cifuzz || false }} + +jobs: + compute-changes: + name: Compute changed files + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + config-hash: ${{ steps.config-hash.outputs.hash }} + run-cifuzz: ${{ steps.check.outputs.run-cifuzz }} + run-docs: ${{ steps.docs-changes.outputs.run-docs }} + run-hypothesis: ${{ steps.check.outputs.run-hypothesis }} + run-tests: ${{ steps.check.outputs.run-tests }} + run-win-msi: ${{ steps.win-msi-changes.outputs.run-win-msi }} + steps: + - run: >- + echo '${{ github.event_name }}' + - uses: actions/checkout@v4 + - name: Check for source changes + id: check + run: | + if [ -z "$GITHUB_BASE_REF" ]; then + echo "run-tests=true" >> $GITHUB_OUTPUT + else + git fetch origin $GITHUB_BASE_REF --depth=1 + # git diff "origin/$GITHUB_BASE_REF..." (3 dots) may be more + # reliable than git diff "origin/$GITHUB_BASE_REF.." (2 dots), + # but it requires to download more commits (this job uses + # "git fetch --depth=1"). + # + # git diff "origin/$GITHUB_BASE_REF..." (3 dots) works with Git + # 2.26, but Git 2.28 is stricter and fails with "no merge base". + # + # git diff "origin/$GITHUB_BASE_REF.." (2 dots) should be enough on + # GitHub, since GitHub starts by merging origin/$GITHUB_BASE_REF + # into the PR branch anyway. + # + # https://github.com/python/core-workflow/issues/373 + git diff --name-only origin/$GITHUB_BASE_REF.. 
| grep -qvE '(\.rst$|^Doc|^Misc|^\.pre-commit-config\.yaml$|\.ruff\.toml$|\.md$|mypy\.ini$)' && echo "run-tests=true" >> $GITHUB_OUTPUT || true + fi + + # Check if we should run hypothesis tests + GIT_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}} + echo $GIT_BRANCH + if $(echo "$GIT_BRANCH" | grep -q -w '3\.\(8\|9\|10\|11\)'); then + echo "Branch too old for hypothesis tests" + echo "run-hypothesis=false" >> $GITHUB_OUTPUT + else + echo "Run hypothesis tests" + echo "run-hypothesis=true" >> $GITHUB_OUTPUT + fi + + # oss-fuzz maintains a configuration for fuzzing the main branch of + # CPython, so CIFuzz should be run only for code that is likely to be + # merged into the main branch; compatibility with older branches may + # be broken. + FUZZ_RELEVANT_FILES='(\.c$|\.h$|\.cpp$|^configure$|^\.github/workflows/build\.yml$|^Modules/_xxtestfuzz)' + if [ "$GITHUB_BASE_REF" = "main" ] && [ "$(git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qE $FUZZ_RELEVANT_FILES; echo $?)" -eq 0 ]; then + # The tests are pretty slow so they are executed only for PRs + # changing relevant files. + echo "Run CIFuzz tests" + echo "run-cifuzz=true" >> $GITHUB_OUTPUT + else + echo "Branch too old for CIFuzz tests; or no C files were changed" + echo "run-cifuzz=false" >> $GITHUB_OUTPUT + fi + - name: Compute hash for config cache key + id: config-hash + run: | + echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> $GITHUB_OUTPUT + - name: Get a list of the changed documentation-related files + if: github.event_name == 'pull_request' + id: changed-docs-files + uses: Ana06/get-changed-files@v2.3.0 + with: + filter: | + Doc/** + Misc/** + .github/workflows/reusable-docs.yml + format: csv # works for paths with spaces + - name: Check for docs changes + if: >- + github.event_name == 'pull_request' + && steps.changed-docs-files.outputs.added_modified_renamed != '' + id: docs-changes + run: | + echo "run-docs=true" >> "${GITHUB_OUTPUT}" + - name: Get a list of the MSI installer-related files + id: changed-win-msi-files + uses: Ana06/get-changed-files@v2.3.0 + with: + filter: | + Tools/msi/** + .github/workflows/reusable-windows-msi.yml + format: csv # works for paths with spaces + - name: Check for changes in MSI installer-related files + if: >- + steps.changed-win-msi-files.outputs.added_modified_renamed != '' + id: win-msi-changes + run: | + echo "run-win-msi=true" >> "${GITHUB_OUTPUT}" + +... From bd3d31f380cd451a4ab6da5fbfde463fed95b5b5 Mon Sep 17 00:00:00 2001 From: CF Bolz-Tereick Date: Wed, 31 Jul 2024 12:33:29 +0200 Subject: [PATCH 38/70] gh-87320: In the code module, handle exceptions raised in sys.excepthook (GH-122456) Before, the exception caused by calling non-default sys.excepthook in code.InteractiveInterpreter bubbled up to the caller, ending the REPL. 
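A minimal sketch of the change in behaviour, driving the console from a script (the broken ``sys.excepthook = 1`` mirrors the new tests; the pushed statements are illustrative)::

    import code
    import sys

    sys.excepthook = 1            # deliberately broken: not callable

    console = code.InteractiveConsole()
    # With this change, the bad hook is reported on stderr ("Error in
    # sys.excepthook: ... Original exception was: ...") and the session survives.
    # Previously the TypeError from calling the hook escaped and ended the REPL.
    console.push("1/0")
    console.push("a = 123")
    console.push("print(a)")      # still works: prints 123

    sys.excepthook = sys.__excepthook__   # restore the default hook
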
--- Lib/code.py | 19 +++++++++-- Lib/test/test_code_module.py | 33 +++++++++++++++++++ Lib/test/test_pyrepl/test_pyrepl.py | 24 ++++++++++++++ ...4-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst | 3 ++ 4 files changed, 76 insertions(+), 3 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2024-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst diff --git a/Lib/code.py b/Lib/code.py index a55fced0704b1d..cd3889067d068d 100644 --- a/Lib/code.py +++ b/Lib/code.py @@ -129,7 +129,7 @@ def showsyntaxerror(self, filename=None, **kwargs): else: # If someone has set sys.excepthook, we let that take precedence # over self.write - sys.excepthook(type, value, tb) + self._call_excepthook(type, value, tb) def showtraceback(self, **kwargs): """Display the exception that just occurred. @@ -144,16 +144,29 @@ def showtraceback(self, **kwargs): sys.last_traceback = last_tb sys.last_exc = ei[1] try: - lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next, colorize=colorize) if sys.excepthook is sys.__excepthook__: + lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next, colorize=colorize) self.write(''.join(lines)) else: # If someone has set sys.excepthook, we let that take precedence # over self.write - sys.excepthook(ei[0], ei[1], last_tb) + self._call_excepthook(ei[0], ei[1], last_tb) finally: last_tb = ei = None + def _call_excepthook(self, typ, value, tb): + try: + sys.excepthook(typ, value, tb) + except SystemExit: + raise + except BaseException as e: + e.__context__ = None + print('Error in sys.excepthook:', file=sys.stderr) + sys.__excepthook__(type(e), e, e.__traceback__.tb_next) + print(file=sys.stderr) + print('Original exception was:', file=sys.stderr) + sys.__excepthook__(typ, value, tb) + def write(self, data): """Write a string. diff --git a/Lib/test/test_code_module.py b/Lib/test/test_code_module.py index 259778a5cade98..5dc89108f0ad88 100644 --- a/Lib/test/test_code_module.py +++ b/Lib/test/test_code_module.py @@ -77,6 +77,39 @@ def test_sysexcepthook(self): self.console.interact() self.assertTrue(hook.called) + def test_sysexcepthook_crashing_doesnt_close_repl(self): + self.infunc.side_effect = ["1/0", "a = 123", "print(a)", EOFError('Finished')] + self.sysmod.excepthook = 1 + self.console.interact() + self.assertEqual(['write', ('123', ), {}], self.stdout.method_calls[0]) + error = "".join(call.args[0] for call in self.stderr.method_calls if call[0] == 'write') + self.assertIn("Error in sys.excepthook:", error) + self.assertEqual(error.count("'int' object is not callable"), 1) + self.assertIn("Original exception was:", error) + self.assertIn("division by zero", error) + + def test_sysexcepthook_raising_BaseException(self): + self.infunc.side_effect = ["1/0", "a = 123", "print(a)", EOFError('Finished')] + s = "not so fast" + def raise_base(*args, **kwargs): + raise BaseException(s) + self.sysmod.excepthook = raise_base + self.console.interact() + self.assertEqual(['write', ('123', ), {}], self.stdout.method_calls[0]) + error = "".join(call.args[0] for call in self.stderr.method_calls if call[0] == 'write') + self.assertIn("Error in sys.excepthook:", error) + self.assertEqual(error.count("not so fast"), 1) + self.assertIn("Original exception was:", error) + self.assertIn("division by zero", error) + + def test_sysexcepthook_raising_SystemExit_gets_through(self): + self.infunc.side_effect = ["1/0"] + def raise_base(*args, **kwargs): + raise SystemExit + self.sysmod.excepthook = raise_base + with self.assertRaises(SystemExit): + self.console.interact() + def test_banner(self): # 
with banner self.infunc.side_effect = EOFError('Finished') diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py index 3a1bacef8a1756..d5eafc5eb58cac 100644 --- a/Lib/test/test_pyrepl/test_pyrepl.py +++ b/Lib/test/test_pyrepl/test_pyrepl.py @@ -1049,6 +1049,30 @@ def test_python_basic_repl(self): self.assertNotIn("Exception", output) self.assertNotIn("Traceback", output) + @force_not_colorized + def test_bad_sys_excepthook_doesnt_crash_pyrepl(self): + env = os.environ.copy() + commands = ("import sys\n" + "sys.excepthook = 1\n" + "1/0\n" + "exit()\n") + + def check(output, exitcode): + self.assertIn("Error in sys.excepthook:", output) + self.assertEqual(output.count("'int' object is not callable"), 1) + self.assertIn("Original exception was:", output) + self.assertIn("division by zero", output) + self.assertEqual(exitcode, 0) + env.pop("PYTHON_BASIC_REPL", None) + output, exit_code = self.run_repl(commands, env=env) + if "can\'t use pyrepl" in output: + self.skipTest("pyrepl not available") + check(output, exit_code) + + env["PYTHON_BASIC_REPL"] = "1" + output, exit_code = self.run_repl(commands, env=env) + check(output, exit_code) + def test_not_wiping_history_file(self): # skip, if readline module is not available import_module('readline') diff --git a/Misc/NEWS.d/next/Library/2024-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst b/Misc/NEWS.d/next/Library/2024-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst new file mode 100644 index 00000000000000..4322b719c690c2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst @@ -0,0 +1,3 @@ +In :class:`code.InteractiveInterpreter`, handle exceptions caused by calling a +non-default :func:`sys.excepthook`. Before, the exception bubbled up to the +caller, ending the :term:`REPL`. 
From 8844197daaeb3aa026cfe1cac6cf9d1b52c2540e Mon Sep 17 00:00:00 2001 From: Malcolm Smith Date: Wed, 31 Jul 2024 19:35:10 +0100 Subject: [PATCH 39/70] gh-116622: Skip PosixPathTest.test_expanduser_pwd2 on platforms which don't support pwd.getpwall (GH-122521) --- Lib/test/test_posixpath.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index fb714fd90ae2b3..ca5cf42f8fcd71 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -5,7 +5,7 @@ import unittest from posixpath import realpath, abspath, dirname, basename from test import test_genericpath -from test.support import import_helper +from test.support import get_attribute, import_helper from test.support import cpython_only, os_helper from test.support.os_helper import FakePath from unittest import mock @@ -359,7 +359,7 @@ def test_expanduser_pwd(self): "no home directory on VxWorks") def test_expanduser_pwd2(self): pwd = import_helper.import_module('pwd') - for all_entry in pwd.getpwall(): + for all_entry in get_attribute(pwd, 'getpwall')(): name = all_entry.pw_name # gh-121200: pw_dir can be different between getpwall() and From 06656e259bc9b2c3cf8a23bdc6e4acb052c56e1f Mon Sep 17 00:00:00 2001 From: Malcolm Smith Date: Thu, 1 Aug 2024 01:23:10 +0100 Subject: [PATCH 40/70] gh-116622: Don't expose `FICLONE` ioctl on Android (#122522) Don't expose `FICLONE` ioctl on Android Co-authored-by: Russell Keith-Magee --- .../Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst | 2 ++ Modules/fcntlmodule.c | 5 +++++ 2 files changed, 7 insertions(+) create mode 100644 Misc/NEWS.d/next/Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst diff --git a/Misc/NEWS.d/next/Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst b/Misc/NEWS.d/next/Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst new file mode 100644 index 00000000000000..fc65b4d973b27d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst @@ -0,0 +1,2 @@ +On Android, the ``FICLONE`` and ``FICLONERANGE`` constants are no longer +exposed by :mod:`fcntl`, as these ioctls are blocked by SELinux. diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c index 0c06c03a6c403e..90ebfd7e99a777 100644 --- a/Modules/fcntlmodule.c +++ b/Modules/fcntlmodule.c @@ -580,12 +580,17 @@ all_ins(PyObject* m) #ifdef F_GETPIPE_SZ if (PyModule_AddIntMacro(m, F_GETPIPE_SZ)) return -1; #endif + +/* On Android, FICLONE is blocked by SELinux. 
*/ +#ifndef __ANDROID__ #ifdef FICLONE if (PyModule_AddIntMacro(m, FICLONE)) return -1; #endif #ifdef FICLONERANGE if (PyModule_AddIntMacro(m, FICLONERANGE)) return -1; #endif +#endif + #ifdef F_GETOWN_EX // since Linux 2.6.32 if (PyModule_AddIntMacro(m, F_GETOWN_EX)) return -1; From 46f5a4f9e1781ad8d60eb53bbaf6cd8534a286cd Mon Sep 17 00:00:00 2001 From: jianghuyiyuan Date: Thu, 1 Aug 2024 09:26:09 +0900 Subject: [PATCH 41/70] Fix typos in docs, error messages and comments (#122502) Signed-off-by: jianghuyiyuan --- Doc/howto/isolating-extensions.rst | 2 +- Doc/library/threading.rst | 2 +- Doc/using/configure.rst | 2 +- Include/internal/pycore_freelist_state.h | 2 +- Lib/test/test_webbrowser.py | 2 +- Python/crossinterp.c | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst index e35855deedbe5f..a636e06bda8344 100644 --- a/Doc/howto/isolating-extensions.rst +++ b/Doc/howto/isolating-extensions.rst @@ -339,7 +339,7 @@ That is, heap types should: - Define a traverse function using ``Py_tp_traverse``, which visits the type (e.g. using ``Py_VISIT(Py_TYPE(self))``). -Please refer to the the documentation of +Please refer to the documentation of :c:macro:`Py_TPFLAGS_HAVE_GC` and :c:member:`~PyTypeObject.tp_traverse` for additional considerations. diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 49c2b9b3ccd4fd..cb82fea377697b 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -1018,7 +1018,7 @@ method. The :meth:`~Event.wait` method blocks until the flag is true. has not expired. The return value represents the reason that this blocking method returned; ``True`` if returning because the internal flag is set to true, or ``False`` if a timeout is given and - the the internal flag did not become true within the given wait time. + the internal flag did not become true within the given wait time. When the timeout argument is present and not ``None``, it should be a floating-point number specifying a timeout for the operation in seconds, diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index 32adfb0ba6e5fc..6a4a52bb6e8b12 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -1120,7 +1120,7 @@ Remove built files. make distclean ^^^^^^^^^^^^^^ -In addition to the the work done by ``make clean``, remove files +In addition to the work done by ``make clean``, remove files created by the configure script. ``configure`` will have to be run before building again. [#]_ diff --git a/Include/internal/pycore_freelist_state.h b/Include/internal/pycore_freelist_state.h index edf79dd7521c41..e8df784bcba06e 100644 --- a/Include/internal/pycore_freelist_state.h +++ b/Include/internal/pycore_freelist_state.h @@ -28,7 +28,7 @@ extern "C" { // A generic freelist of either PyObjects or other data structures. struct _Py_freelist { - // Entries are linked together using the first word of the the object. + // Entries are linked together using the first word of the object. // For PyObjects, this overlaps with the `ob_refcnt` field or the `ob_tid` // field. void *freelist; diff --git a/Lib/test/test_webbrowser.py b/Lib/test/test_webbrowser.py index ae8d776e8413ff..4fcbc5c2e59ea3 100644 --- a/Lib/test/test_webbrowser.py +++ b/Lib/test/test_webbrowser.py @@ -244,7 +244,7 @@ def _obj_ref(self, *args): @unittest.skipIf(getattr(webbrowser, "objc", None) is None, "iOS Webbrowser tests require ctypes") def setUp(self): - # Intercept the the objc library. 
Wrap the calls to get the + # Intercept the objc library. Wrap the calls to get the # references to classes and selectors to return strings, and # wrap msgSend to return stringified object references self.orig_objc = webbrowser.objc diff --git a/Python/crossinterp.c b/Python/crossinterp.c index acb372af42408e..0aca322d987dba 100644 --- a/Python/crossinterp.c +++ b/Python/crossinterp.c @@ -699,7 +699,7 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc) Py_DECREF(tbexc); if (info->errdisplay == NULL) { #ifdef Py_DEBUG - PyErr_FormatUnraisable("Exception ignored while formating TracebackException"); + PyErr_FormatUnraisable("Exception ignored while formatting TracebackException"); #endif PyErr_Clear(); } From a9d56e38a08ec198a2289d8fff65444b39dd4a32 Mon Sep 17 00:00:00 2001 From: Mark Shannon Date: Thu, 1 Aug 2024 09:27:26 +0100 Subject: [PATCH 42/70] GH-122155: Track local variables between pops and pushes in cases generator (GH-122286) --- Include/internal/pycore_opcode_metadata.h | 2 +- Lib/test/test_generated_cases.py | 65 +++++- Python/bytecodes.c | 9 +- Python/executor_cases.c.h | 12 +- Python/generated_cases.c.h | 216 +++++++++++++++---- Python/optimizer_cases.c.h | 3 +- Tools/cases_generator/analyzer.py | 10 +- Tools/cases_generator/generators_common.py | 29 ++- Tools/cases_generator/optimizer_generator.py | 25 +-- Tools/cases_generator/parsing.py | 5 + Tools/cases_generator/stack.py | 172 +++++++++++---- Tools/cases_generator/tier1_generator.py | 50 +++-- Tools/cases_generator/tier2_generator.py | 24 ++- 13 files changed, 463 insertions(+), 159 deletions(-) diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index eaba280f1bf1cd..464d3500890e53 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -903,7 +903,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { case UNARY_NOT: return 1; case UNPACK_EX: - return 1 + (oparg >> 8) + (oparg & 0xFF); + return 1 + (oparg & 0xFF) + (oparg >> 8); case UNPACK_SEQUENCE: return oparg; case UNPACK_SEQUENCE_LIST: diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index a4cbdecdc9db11..beafa544aaacb7 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -31,7 +31,7 @@ def skip_if_different_mount_drives(): with test_tools.imports_under_tool("cases_generator"): from analyzer import StackItem import parser - from stack import Stack + from stack import Local, Stack import tier1_generator import optimizer_generator @@ -60,9 +60,9 @@ def test_effect_sizes(self): stack.pop(y) stack.pop(x) for out in outputs: - stack.push(out) - self.assertEqual(stack.base_offset.to_c(), "-1 - oparg*2 - oparg") - self.assertEqual(stack.top_offset.to_c(), "1 - oparg*2 - oparg + oparg*4") + stack.push(Local.local(out)) + self.assertEqual(stack.base_offset.to_c(), "-1 - oparg - oparg*2") + self.assertEqual(stack.top_offset.to_c(), "1 - oparg - oparg*2 + oparg*4") class TestGeneratedCases(unittest.TestCase): @@ -602,7 +602,11 @@ def test_array_error_if(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP); - if (oparg == 0) { stack_pointer += -1 - oparg; goto somewhere; } + if (oparg == 0) { + stack_pointer += -1 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto somewhere; + } stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -908,7 +912,6 @@ def test_used_unused_used(self): next_instr += 1; INSTRUCTION_STATS(TEST); _PyStackRef w; - _PyStackRef x; 
_PyStackRef y; // FIRST w = stack_pointer[-1]; @@ -916,11 +919,10 @@ def test_used_unused_used(self): use(w); } // SECOND - x = w; { } // THIRD - y = x; + y = w; { use(y); } @@ -1024,6 +1026,7 @@ def test_pop_on_error_peeks(self): } op(THIRD, (j, k --)) { + j,k; // Mark j and k as used ERROR_IF(cond, error); } @@ -1054,6 +1057,7 @@ def test_pop_on_error_peeks(self): k = b; j = a; { + j,k; // Mark j and k as used if (cond) goto pop_2_error; } stack_pointer += -2; @@ -1063,6 +1067,51 @@ def test_pop_on_error_peeks(self): """ self.run_cases_test(input, output) + def test_push_then_error(self): + + input = """ + op(FIRST, ( -- a)) { + a = 1; + } + + op(SECOND, (a -- a, b)) { + b = 1; + ERROR_IF(cond, error); + } + + macro(TEST) = FIRST + SECOND; + """ + + output = """ + TARGET(TEST) { + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(TEST); + _PyStackRef a; + _PyStackRef b; + // FIRST + { + a = 1; + } + // SECOND + { + b = 1; + if (cond) { + stack_pointer[0] = a; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } + } + stack_pointer[0] = a; + stack_pointer[1] = b; + stack_pointer += 2; + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); + } + """ + self.run_cases_test(input, output) + class TestGeneratedAbstractCases(unittest.TestCase): def setUp(self) -> None: diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 4afce2cc3bea9d..abfd8039b293a1 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1357,8 +1357,8 @@ dummy_func( (void)counter; } - op(_UNPACK_SEQUENCE, (seq -- unused[oparg])) { - _PyStackRef *top = stack_pointer + oparg - 1; + op(_UNPACK_SEQUENCE, (seq -- output[oparg])) { + _PyStackRef *top = output + oparg; int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top); DECREF_INPUTS(); ERROR_IF(res == 0, error); @@ -1401,9 +1401,8 @@ dummy_func( DECREF_INPUTS(); } - inst(UNPACK_EX, (seq -- unused[oparg & 0xFF], unused, unused[oparg >> 8])) { - int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); - _PyStackRef *top = stack_pointer + totalargs - 1; + inst(UNPACK_EX, (seq -- left[oparg & 0xFF], unused, right[oparg >> 8])) { + _PyStackRef *top = right + (oparg >> 8); int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top); DECREF_INPUTS(); ERROR_IF(res == 0, error); diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 62654035e80f50..f0acc3b6ea2ef4 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -1440,9 +1440,11 @@ case _UNPACK_SEQUENCE: { _PyStackRef seq; + _PyStackRef *output; oparg = CURRENT_OPARG(); seq = stack_pointer[-1]; - _PyStackRef *top = stack_pointer + oparg - 1; + output = &stack_pointer[-1]; + _PyStackRef *top = output + oparg; int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top); PyStackRef_CLOSE(seq); if (res == 0) JUMP_TO_ERROR(); @@ -1532,14 +1534,15 @@ case _UNPACK_EX: { _PyStackRef seq; + _PyStackRef *right; oparg = CURRENT_OPARG(); seq = stack_pointer[-1]; - int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); - _PyStackRef *top = stack_pointer + totalargs - 1; + right = &stack_pointer[(oparg & 0xFF)]; + _PyStackRef *top = right + (oparg >> 8); int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top); PyStackRef_CLOSE(seq); if (res == 0) JUMP_TO_ERROR(); - stack_pointer += (oparg >> 8) + (oparg & 0xFF); + stack_pointer += (oparg & 0xFF) + (oparg >> 8); assert(WITHIN_STACK_BOUNDS()); break; } @@ -3595,6 +3598,7 @@ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; 
callable = stack_pointer[-2 - oparg]; + args = &stack_pointer[-oparg]; if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); PyObject *self = ((PyMethodObject *)callable_o)->im_self; diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 3c643f637ab095..ff8c4eab58f324 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -610,11 +610,19 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (true) { stack_pointer += -oparg; goto error; } + if (true) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *list_o = _PyList_FromArraySteal(values_o, oparg); STACKREFS_TO_PYOBJECTS_CLEANUP(values_o); - if (list_o == NULL) { stack_pointer += -oparg; goto error; } + if (list_o == NULL) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } list = PyStackRef_FromPyObjectSteal(list_o); stack_pointer[-oparg] = list; stack_pointer += 1 - oparg; @@ -634,7 +642,11 @@ for (int _i = oparg*2; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (true) { stack_pointer += -oparg*2; goto error; } + if (true) { + stack_pointer += -oparg*2; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *map_o = _PyDict_FromItems( values_o, 2, @@ -644,7 +656,11 @@ for (int _i = oparg*2; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (map_o == NULL) { stack_pointer += -oparg*2; goto error; } + if (map_o == NULL) { + stack_pointer += -oparg*2; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } map = PyStackRef_FromPyObjectSteal(map_o); stack_pointer[-oparg*2] = map; stack_pointer += 1 - oparg*2; @@ -664,7 +680,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (true) { stack_pointer += -oparg; goto error; } + if (true) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } int err = 0; for (int i = 0; i < oparg; i++) { @@ -676,7 +696,11 @@ } if (err != 0) { Py_DECREF(set_o); - if (true) { stack_pointer += -oparg; goto error; } + if (true) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } set = PyStackRef_FromPyObjectSteal(set_o); stack_pointer[-oparg] = set; @@ -703,7 +727,11 @@ PyStackRef_CLOSE(start); PyStackRef_CLOSE(stop); PyStackRef_XCLOSE(step); - if (slice_o == NULL) { stack_pointer += -2 - ((oparg == 3) ? 1 : 0); goto error; } + if (slice_o == NULL) { + stack_pointer += -2 - ((oparg == 3) ? 1 : 0); + assert(WITHIN_STACK_BOUNDS()); + goto error; + } slice = PyStackRef_FromPyObjectSteal(slice_o); stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice; stack_pointer += -1 - ((oparg == 3) ? 
1 : 0); @@ -723,14 +751,22 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(pieces[_i]); } - if (true) { stack_pointer += -oparg; goto error; } + if (true) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *str_o = _PyUnicode_JoinArray(&_Py_STR(empty), pieces_o, oparg); STACKREFS_TO_PYOBJECTS_CLEANUP(pieces_o); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(pieces[_i]); } - if (str_o == NULL) { stack_pointer += -oparg; goto error; } + if (str_o == NULL) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } str = PyStackRef_FromPyObjectSteal(str_o); stack_pointer[-oparg] = str; stack_pointer += 1 - oparg; @@ -746,7 +782,11 @@ _PyStackRef tup; values = &stack_pointer[-oparg]; PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); - if (tup_o == NULL) { stack_pointer += -oparg; goto error; } + if (tup_o == NULL) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } tup = PyStackRef_FromPyObjectSteal(tup_o); stack_pointer[-oparg] = tup; stack_pointer += 1 - oparg; @@ -780,7 +820,6 @@ self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; { - args = &stack_pointer[-oparg]; uint16_t counter = read_u16(&this_instr[1].cache); (void)counter; #if ENABLE_SPECIALIZATION @@ -795,7 +834,9 @@ } /* Skip 2 cache entries */ // _MAYBE_EXPAND_METHOD + args = &stack_pointer[-oparg]; { + args = &stack_pointer[-oparg]; if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); PyObject *self = ((PyMethodObject *)callable_o)->im_self; @@ -813,6 +854,7 @@ } } // _DO_CALL + args = &stack_pointer[-oparg]; self_or_null = maybe_self; callable = func; { @@ -852,7 +894,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) { stack_pointer += -2 - oparg; goto error; } + if (true) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = PyObject_Vectorcall( callable_o, args_o, @@ -881,7 +927,11 @@ for (int i = 0; i < total_args; i++) { PyStackRef_CLOSE(args[i]); } - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -1190,7 +1240,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) { stack_pointer += -2 - oparg; goto error; } + if (true) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = tp->tp_vectorcall((PyObject *)tp, args_o, total_args, NULL); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); @@ -1199,7 +1253,11 @@ PyStackRef_CLOSE(args[i]); } PyStackRef_CLOSE(callable); - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -1247,7 +1305,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) { stack_pointer += -2 - oparg; goto error; } + if (true) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = ((PyCFunctionFast)(void(*)(void))cfunc)( PyCFunction_GET_SELF(callable_o), @@ -1260,7 +1322,11 @@ PyStackRef_CLOSE(args[i]); } PyStackRef_CLOSE(callable); - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; 
} + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -1310,7 +1376,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) { stack_pointer += -2 - oparg; goto error; } + if (true) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = cfunc(PyCFunction_GET_SELF(callable_o), args_o, total_args, NULL); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); @@ -1320,7 +1390,11 @@ PyStackRef_CLOSE(args[i]); } PyStackRef_CLOSE(callable); - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -1370,7 +1444,11 @@ assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(arg); PyStackRef_CLOSE(callable); - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -1467,7 +1545,11 @@ PyStackRef_CLOSE(callargs_st); PyStackRef_XCLOSE(kwargs_st); assert(PyStackRef_AsPyObjectBorrow(PEEK(2 + (oparg & 1))) == NULL); - if (PyStackRef_IsNull(result)) { stack_pointer += -3 - (oparg & 1); goto error; } + if (PyStackRef_IsNull(result)) { + stack_pointer += -3 - (oparg & 1); + assert(WITHIN_STACK_BOUNDS()); + goto error; + } stack_pointer[-3 - (oparg & 1)] = result; stack_pointer += -2 - (oparg & 1); assert(WITHIN_STACK_BOUNDS()); @@ -1625,7 +1707,11 @@ PyStackRef_CLOSE(args[_i]); } PyStackRef_CLOSE(kwnames); - if (true) { stack_pointer += -3 - oparg; goto error; } + if (true) { + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = PyObject_Vectorcall( callable_o, args_o, @@ -1655,7 +1741,11 @@ for (int i = 0; i < total_args; i++) { PyStackRef_CLOSE(args[i]); } - if (res_o == NULL) { stack_pointer += -3 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-3 - oparg] = res; stack_pointer += -2 - oparg; @@ -1785,7 +1875,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) { stack_pointer += -2 - oparg; goto error; } + if (true) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = cfunc(self, (args_o + 1), nargs); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); @@ -1795,7 +1889,11 @@ PyStackRef_CLOSE(args[i]); } PyStackRef_CLOSE(callable); - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -1848,7 +1946,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) { stack_pointer += -2 - oparg; goto error; } + if (true) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = cfunc(self, (args_o + 1), nargs, NULL); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); @@ -1858,7 +1960,11 @@ PyStackRef_CLOSE(args[i]); } PyStackRef_CLOSE(callable); - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + 
assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -1912,7 +2018,11 @@ assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(self_stackref); PyStackRef_CLOSE(callable); - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -1969,7 +2079,11 @@ PyStackRef_CLOSE(self_stackref); PyStackRef_CLOSE(arg_stackref); PyStackRef_CLOSE(callable); - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -2022,7 +2136,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) { stack_pointer += -2 - oparg; goto error; } + if (true) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = PyObject_Vectorcall( callable_o, args_o, @@ -2034,7 +2152,11 @@ for (int i = 0; i < total_args; i++) { PyStackRef_CLOSE(args[i]); } - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -3526,6 +3648,7 @@ self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; { + args = &stack_pointer[-oparg]; if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); PyObject *self = ((PyMethodObject *)callable_o)->im_self; @@ -3543,6 +3666,7 @@ } } // _MONITOR_CALL + args = &stack_pointer[-oparg]; { int is_meth = !PyStackRef_IsNull(maybe_self); PyObject *function = PyStackRef_AsPyObjectBorrow(func); @@ -3563,6 +3687,7 @@ if (err) goto error; } // _DO_CALL + args = &stack_pointer[-oparg]; self_or_null = maybe_self; callable = func; { @@ -3602,7 +3727,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) { stack_pointer += -2 - oparg; goto error; } + if (true) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } PyObject *res_o = PyObject_Vectorcall( callable_o, args_o, @@ -3631,7 +3760,11 @@ for (int i = 0; i < total_args; i++) { PyStackRef_CLOSE(args[i]); } - if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -5777,7 +5910,11 @@ "bad RAISE_VARARGS oparg"); break; } - if (true) { stack_pointer += -oparg; goto error; } + if (true) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } } TARGET(RERAISE) { @@ -6834,13 +6971,14 @@ next_instr += 1; INSTRUCTION_STATS(UNPACK_EX); _PyStackRef seq; + _PyStackRef *right; seq = stack_pointer[-1]; - int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); - _PyStackRef *top = stack_pointer + totalargs - 1; + right = &stack_pointer[(oparg & 0xFF)]; + _PyStackRef *top = right + (oparg >> 8); int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top); PyStackRef_CLOSE(seq); if (res == 0) goto pop_1_error; - stack_pointer += (oparg >> 8) + (oparg & 0xFF); + stack_pointer += (oparg & 0xFF) + (oparg >> 8); 
assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -6853,6 +6991,7 @@ _Py_CODEUNIT *this_instr = next_instr - 2; (void)this_instr; _PyStackRef seq; + _PyStackRef *output; // _SPECIALIZE_UNPACK_SEQUENCE seq = stack_pointer[-1]; { @@ -6872,7 +7011,8 @@ } // _UNPACK_SEQUENCE { - _PyStackRef *top = stack_pointer + oparg - 1; + output = &stack_pointer[-1]; + _PyStackRef *top = output + oparg; int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top); PyStackRef_CLOSE(seq); if (res == 0) goto pop_1_error; diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index 4fa40ff861ba70..b704c9e77319e4 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -753,7 +753,7 @@ for (int i = 0; i < totalargs; i++) { values[i] = sym_new_unknown(ctx); } - stack_pointer += (oparg >> 8) + (oparg & 0xFF); + stack_pointer += (oparg & 0xFF) + (oparg >> 8); assert(WITHIN_STACK_BOUNDS()); break; } @@ -1607,6 +1607,7 @@ args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; + args = &stack_pointer[-oparg]; (void)callable; (void)self_or_null; (void)args; diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 675dc0b9acaf45..f6625a3f7322d5 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -216,6 +216,7 @@ def is_super(self) -> bool: @dataclass class Instruction: + where: lexer.Token name: str parts: list[Part] _properties: Properties | None @@ -690,9 +691,10 @@ def add_op(op: parser.InstDef, uops: dict[str, Uop]) -> None: def add_instruction( - name: str, parts: list[Part], instructions: dict[str, Instruction] + where: lexer.Token, name: str, parts: list[Part], + instructions: dict[str, Instruction] ) -> None: - instructions[name] = Instruction(name, parts, None) + instructions[name] = Instruction(where, name, parts, None) def desugar_inst( @@ -720,7 +722,7 @@ def desugar_inst( parts.append(uop) else: parts[uop_index] = uop - add_instruction(name, parts, instructions) + add_instruction(inst.first_token, name, parts, instructions) def add_macro( @@ -741,7 +743,7 @@ def add_macro( case _: assert False assert parts - add_instruction(macro.name, parts, instructions) + add_instruction(macro.first_token, macro.name, parts, instructions) def add_family( diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index 587dc0d03eded5..ab8c99f1e25f97 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -8,7 +8,7 @@ StackItem, ) from cwriter import CWriter -from typing import Callable, Mapping, TextIO, Iterator, Tuple +from typing import Callable, Mapping, TextIO, Iterator from lexer import Token from stack import Stack @@ -25,7 +25,7 @@ def root_relative_path(filename: str) -> str: return filename -def type_and_null(var: StackItem) -> Tuple[str, str]: +def type_and_null(var: StackItem) -> tuple[str, str]: if var.type: return var.type, "NULL" elif var.is_array(): @@ -95,16 +95,23 @@ def replace_error( c_offset = stack.peek_offset() try: offset = -int(c_offset) - close = ";\n" except ValueError: - offset = None - out.emit(f"{{ stack_pointer += {c_offset}; ") - close = "; }\n" - out.emit("goto ") - if offset: - out.emit(f"pop_{offset}_") - out.emit(label) - out.emit(close) + offset = -1 + if offset > 0: + out.emit(f"goto pop_{offset}_") + out.emit(label) + out.emit(";\n") + elif offset == 0: + out.emit("goto ") + out.emit(label) + out.emit(";\n") + else: + 
out.emit("{\n") + stack.flush_locally(out) + out.emit("goto ") + out.emit(label) + out.emit(";\n") + out.emit("}\n") def replace_error_no_pop( diff --git a/Tools/cases_generator/optimizer_generator.py b/Tools/cases_generator/optimizer_generator.py index 6a66693b93305d..f6c2fea40f0dbb 100644 --- a/Tools/cases_generator/optimizer_generator.py +++ b/Tools/cases_generator/optimizer_generator.py @@ -23,7 +23,7 @@ from cwriter import CWriter from typing import TextIO, Iterator from lexer import Token -from stack import Stack, StackError +from stack import Local, Stack, StackError DEFAULT_OUTPUT = ROOT / "Python/optimizer_cases.c.h" DEFAULT_ABSTRACT_INPUT = (ROOT / "Python/optimizer_bytecodes.c").absolute().as_posix() @@ -98,19 +98,18 @@ def write_uop( debug: bool, skip_inputs: bool, ) -> None: + locals: dict[str, Local] = {} try: prototype = override if override else uop is_override = override is not None out.start_line() for var in reversed(prototype.stack.inputs): - res = stack.pop(var, extract_bits=True) + code, local = stack.pop(var, extract_bits=True) if not skip_inputs: - out.emit(res) - if not prototype.properties.stores_sp: - for i, var in enumerate(prototype.stack.outputs): - res = stack.push(var) - if not var.peek or is_override: - out.emit(res) + out.emit(code) + if local.defined: + locals[local.name] = local + out.emit(stack.define_output_arrays(prototype.stack.outputs)) if debug: args = [] for var in prototype.stack.inputs: @@ -135,10 +134,12 @@ def write_uop( else: emit_default(out, uop) - if prototype.properties.stores_sp: - for i, var in enumerate(prototype.stack.outputs): - if not var.peek or is_override: - out.emit(stack.push(var)) + for var in prototype.stack.outputs: + if var.name in locals: + local = locals[var.name] + else: + local = Local.local(var) + out.emit(stack.push(local)) out.start_line() stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=True) except StackError as ex: diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py index 8957838f7a90a1..5acad57f395ea6 100644 --- a/Tools/cases_generator/parsing.py +++ b/Tools/cases_generator/parsing.py @@ -60,6 +60,11 @@ def tokens(self) -> list[lx.Token]: end = context.end return tokens[begin:end] + @property + def first_token(self) -> lx.Token: + context = self.context + assert context is not None + return context.owner.tokens[context.begin] @dataclass class Block(Node): diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index 61dcfd3e30a510..d2d598a120892d 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -38,6 +38,43 @@ def var_size(var: StackItem) -> str: else: return "1" +@dataclass +class Local: + + item: StackItem + cached: bool + in_memory: bool + defined: bool + + @staticmethod + def unused(defn: StackItem) -> "Local": + return Local(defn, False, defn.is_array(), False) + + @staticmethod + def local(defn: StackItem) -> "Local": + array = defn.is_array() + return Local(defn, not array, array, True) + + @staticmethod + def redefinition(var: StackItem, prev: "Local") -> "Local": + assert var.is_array() == prev.is_array() + return Local(var, prev.cached, prev.in_memory, True) + + @property + def size(self) -> str: + return self.item.size + + @property + def name(self) -> str: + return self.item.name + + @property + def condition(self) -> str | None: + return self.item.condition + + def is_array(self) -> bool: + return self.item.is_array() + @dataclass class StackOffset: "The stack offset of the virtual base of the stack from 
the physical stack pointer" @@ -66,7 +103,11 @@ def __neg__(self) -> "StackOffset": def simplify(self) -> None: "Remove matching values from both the popped and pushed list" - if not self.popped or not self.pushed: + if not self.popped: + self.pushed.sort() + return + if not self.pushed: + self.popped.sort() return # Sort the list so the lexically largest element is last. popped = sorted(self.popped) @@ -87,6 +128,8 @@ def simplify(self) -> None: popped.append(pop) self.popped.extend(popped) self.pushed.extend(pushed) + self.pushed.sort() + self.popped.sort() def to_c(self) -> str: self.simplify() @@ -125,10 +168,10 @@ class Stack: def __init__(self) -> None: self.top_offset = StackOffset.empty() self.base_offset = StackOffset.empty() - self.variables: list[StackItem] = [] + self.variables: list[Local] = [] self.defined: set[str] = set() - def pop(self, var: StackItem, extract_bits: bool = False) -> str: + def pop(self, var: StackItem, extract_bits: bool = False) -> tuple[str, Local]: self.top_offset.pop(var) indirect = "&" if var.is_array() else "" if self.variables: @@ -141,21 +184,32 @@ def pop(self, var: StackItem, extract_bits: bool = False) -> str: if var.name in UNUSED: if popped.name not in UNUSED and popped.name in self.defined: raise StackError(f"Value is declared unused, but is already cached by prior operation") - return "" - if popped.name in UNUSED or popped.name not in self.defined: - self.defined.add(var.name) + return "", popped + if not var.used: + return "", popped + self.defined.add(var.name) + # Always define array variables as it is free, and their offset might have changed + if var.is_array(): + return ( + f"{var.name} = &stack_pointer[{self.top_offset.to_c()}];\n", + Local.redefinition(var, popped) + ) + if not popped.defined: return ( - f"{var.name} = {indirect}stack_pointer[{self.top_offset.to_c()}];\n" + f"{var.name} = stack_pointer[{self.top_offset.to_c()}];\n", + Local.redefinition(var, popped) ) else: - self.defined.add(var.name) if popped.name == var.name: - return "" + return "", popped else: - return f"{var.name} = {popped.name};\n" + return ( + f"{var.name} = {popped.name};\n", + Local.redefinition(var, popped) + ) self.base_offset.pop(var) if var.name in UNUSED or not var.used: - return "" + return "", Local.unused(var) self.defined.add(var.name) cast = f"({var.type})" if (not indirect and var.type) else "" bits = ".bits" if cast and not extract_bits else "" @@ -164,61 +218,80 @@ def pop(self, var: StackItem, extract_bits: bool = False) -> str: ) if var.condition: if var.condition == "1": - return f"{assign}\n" + assign = f"{assign}\n" elif var.condition == "0": - return "" + return "", Local.unused(var) else: - return f"if ({var.condition}) {{ {assign} }}\n" - return f"{assign}\n" + assign = f"if ({var.condition}) {{ {assign} }}\n" + else: + assign = f"{assign}\n" + in_memory = var.is_array() or var.peek + return assign, Local(var, not var.is_array(), in_memory, True) - def push(self, var: StackItem) -> str: + def push(self, var: Local) -> str: self.variables.append(var) - if var.is_array() and var.name not in self.defined and var.name not in UNUSED: + if var.is_array() and not var.defined and var.item.used: + assert var.in_memory + assert not var.cached c_offset = self.top_offset.to_c() - self.top_offset.push(var) + self.top_offset.push(var.item) self.defined.add(var.name) + var.defined = True return f"{var.name} = &stack_pointer[{c_offset}];\n" else: - self.top_offset.push(var) - if var.used: + self.top_offset.push(var.item) + if var.item.used: 
self.defined.add(var.name) return "" - def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None: + def define_output_arrays(self, outputs: list[StackItem]) -> str: + res = [] + top_offset = self.top_offset.copy() + for var in outputs: + if var.is_array() and var.used and not var.peek: + c_offset = top_offset.to_c() + top_offset.push(var) + res.append(f"{var.name} = &stack_pointer[{c_offset}];\n") + else: + top_offset.push(var) + return "\n".join(res) + + @staticmethod + def _do_flush(out: CWriter, variables: list[Local], base_offset: StackOffset, top_offset: StackOffset, + cast_type: str = "uintptr_t", extract_bits: bool = False) -> None: out.start_line() - for var in self.variables: - if not var.peek: - cast = f"({cast_type})" if var.type else "" + for var in variables: + if var.cached and not var.in_memory and not var.item.peek and not var.name in UNUSED: + cast = f"({cast_type})" if var.item.type else "" bits = ".bits" if cast and not extract_bits else "" - if var.name not in UNUSED and not var.is_array(): - if var.condition: - if var.condition == "0": - continue - elif var.condition != "1": - out.emit(f"if ({var.condition}) ") - out.emit( - f"stack_pointer[{self.base_offset.to_c()}]{bits} = {cast}{var.name};\n" - ) - self.base_offset.push(var) - if self.base_offset.to_c() != self.top_offset.to_c(): - print("base", self.base_offset.to_c(), "top", self.top_offset.to_c()) + if var.condition == "0": + continue + if var.condition and var.condition != "1": + out.emit(f"if ({var.condition}) ") + out.emit( + f"stack_pointer[{base_offset.to_c()}]{bits} = {cast}{var.name};\n" + ) + base_offset.push(var.item) + if base_offset.to_c() != top_offset.to_c(): + print("base", base_offset, "top", top_offset) assert False - number = self.base_offset.to_c() + number = base_offset.to_c() if number != "0": out.emit(f"stack_pointer += {number};\n") out.emit("assert(WITHIN_STACK_BOUNDS());\n") + out.start_line() + + def flush_locally(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None: + self._do_flush(out, self.variables[:], self.base_offset.copy(), self.top_offset.copy(), cast_type, extract_bits) + + def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None: + self._do_flush(out, self.variables, self.base_offset, self.top_offset, cast_type, extract_bits) self.variables = [] self.base_offset.clear() self.top_offset.clear() - out.start_line() def peek_offset(self) -> str: - peek = self.base_offset.copy() - for var in self.variables: - if not var.peek: - break - peek.push(var) - return peek.to_c() + return self.top_offset.to_c() def as_comment(self) -> str: return f"/* Variables: {[v.name for v in self.variables]}. Base offset: {self.base_offset.to_c()}. 
Top offset: {self.top_offset.to_c()} */" @@ -236,8 +309,15 @@ def stacks(inst : Instruction | PseudoInstruction) -> Iterator[StackEffect]: yield inst.stack for s in stacks(inst): + locals: dict[str, Local] = {} for var in reversed(s.inputs): - stack.pop(var) + _, local = stack.pop(var) + if var.name != "unused": + locals[local.name] = local for var in s.outputs: - stack.push(var) + if var.name in locals: + local = locals[var.name] + else: + local = Local.unused(var) + stack.push(local) return stack diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py index 118f4b3a6eaa1c..1cdafbd35caea3 100644 --- a/Tools/cases_generator/tier1_generator.py +++ b/Tools/cases_generator/tier1_generator.py @@ -25,7 +25,7 @@ ) from cwriter import CWriter from typing import TextIO -from stack import Stack, StackError +from stack import Local, Stack, StackError, get_stack_effect DEFAULT_OUTPUT = ROOT / "Python/generated_cases.c.h" @@ -43,18 +43,12 @@ def declare_variable(var: StackItem, out: CWriter) -> None: def declare_variables(inst: Instruction, out: CWriter) -> None: - stack = Stack() - for part in inst.parts: - if not isinstance(part, Uop): - continue - try: - for var in reversed(part.stack.inputs): - stack.pop(var) - for var in part.stack.outputs: - stack.push(var) - except StackError as ex: - raise analysis_error(ex.args[0], part.body[0]) from None + try: + stack = get_stack_effect(inst) + except StackError as ex: + raise analysis_error(ex.args[0], inst.where) required = set(stack.defined) + required.discard("unused") for part in inst.parts: if not isinstance(part, Uop): continue @@ -80,16 +74,26 @@ def write_uop( stack.flush(out) return offset try: + locals: dict[str, Local] = {} out.start_line() if braces: out.emit(f"// {uop.name}\n") + peeks: list[Local] = [] for var in reversed(uop.stack.inputs): - out.emit(stack.pop(var)) + code, local = stack.pop(var) + out.emit(code) + if var.peek: + peeks.append(local) + if local.defined: + locals[local.name] = local + # Push back the peeks, so that they remain on the logical + # stack, but their values are cached. 
+ while peeks: + stack.push(peeks.pop()) if braces: out.emit("{\n") - if not uop.properties.stores_sp: - for i, var in enumerate(uop.stack.outputs): - out.emit(stack.push(var)) + out.emit(stack.define_output_arrays(uop.stack.outputs)) + for cache in uop.caches: if cache.name != "unused": if cache.size == 4: @@ -105,16 +109,22 @@ def write_uop( out.emit(f"(void){cache.name};\n") offset += cache.size emit_tokens(out, uop, stack, inst) - if uop.properties.stores_sp: - for i, var in enumerate(uop.stack.outputs): - out.emit(stack.push(var)) + for i, var in enumerate(uop.stack.outputs): + if not var.peek: + if var.name in locals: + local = locals[var.name] + elif var.name == "unused": + local = Local.unused(var) + else: + local = Local.local(var) + out.emit(stack.push(local)) if braces: out.start_line() out.emit("}\n") # out.emit(stack.as_comment() + "\n") return offset except StackError as ex: - raise analysis_error(ex.args[0], uop.body[0]) from None + raise analysis_error(ex.args[0], uop.body[0]) def uses_this(inst: Instruction) -> bool: diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py index 88ad0fd797f0cc..18bab2c13e7eb7 100644 --- a/Tools/cases_generator/tier2_generator.py +++ b/Tools/cases_generator/tier2_generator.py @@ -25,7 +25,7 @@ from cwriter import CWriter from typing import TextIO, Iterator from lexer import Token -from stack import Stack, StackError +from stack import Local, Stack, StackError, get_stack_effect DEFAULT_OUTPUT = ROOT / "Python/executor_cases.c.h" @@ -53,8 +53,9 @@ def declare_variables(uop: Uop, out: CWriter) -> None: for var in reversed(uop.stack.inputs): stack.pop(var) for var in uop.stack.outputs: - stack.push(var) + stack.push(Local.unused(var)) required = set(stack.defined) + required.discard("unused") for var in reversed(uop.stack.inputs): declare_variable(var, uop, required, out) for var in uop.stack.outputs: @@ -156,6 +157,7 @@ def tier2_replace_oparg( def write_uop(uop: Uop, out: CWriter, stack: Stack) -> None: + locals: dict[str, Local] = {} try: out.start_line() if uop.properties.oparg: @@ -165,10 +167,11 @@ def write_uop(uop: Uop, out: CWriter, stack: Stack) -> None: out.emit(f"oparg = {uop.properties.const_oparg};\n") out.emit(f"assert(oparg == CURRENT_OPARG());\n") for var in reversed(uop.stack.inputs): - out.emit(stack.pop(var)) - if not uop.properties.stores_sp: - for i, var in enumerate(uop.stack.outputs): - out.emit(stack.push(var)) + code, local = stack.pop(var) + out.emit(code) + if local.defined: + locals[local.name] = local + out.emit(stack.define_output_arrays(uop.stack.outputs)) for cache in uop.caches: if cache.name != "unused": if cache.size == 4: @@ -178,9 +181,12 @@ def write_uop(uop: Uop, out: CWriter, stack: Stack) -> None: cast = f"uint{cache.size*16}_t" out.emit(f"{type}{cache.name} = ({cast})CURRENT_OPERAND();\n") emit_tokens(out, uop, stack, None, TIER2_REPLACEMENT_FUNCTIONS) - if uop.properties.stores_sp: - for i, var in enumerate(uop.stack.outputs): - out.emit(stack.push(var)) + for i, var in enumerate(uop.stack.outputs): + if var.name in locals: + local = locals[var.name] + else: + local = Local.local(var) + out.emit(stack.push(local)) except StackError as ex: raise analysis_error(ex.args[0], uop.body[0]) from None From 58ffc4cf4aa4cfb47f8768a3c3eaf1dd7a7c4584 Mon Sep 17 00:00:00 2001 From: Rafael Fontenelle Date: Thu, 1 Aug 2024 06:25:16 -0300 Subject: [PATCH 43/70] gh-122384: Mark strings from Download page for translation (#122385) Co-authored-by: Hugo van Kemenade 
<1324225+hugovk@users.noreply.github.com> --- Doc/tools/templates/download.html | 74 ++++++++++++++++++------------- 1 file changed, 42 insertions(+), 32 deletions(-) diff --git a/Doc/tools/templates/download.html b/Doc/tools/templates/download.html index c3114e584fa942..9f99eea6f3c773 100644 --- a/Doc/tools/templates/download.html +++ b/Doc/tools/templates/download.html @@ -1,5 +1,5 @@ {% extends "layout.html" %} -{% set title = 'Download' %} +{% set title = _('Download') %} {% if daily is defined %} {% set dlbase = pathto('archives', 1) %} {% else %} @@ -11,58 +11,68 @@ {% endif %} {% block body %} -

-Download Python {{ release }} Documentation
+{% trans %}Download Python {{ release }} Documentation{% endtrans %}

-{% if last_updated %}Last updated on: {{ last_updated }}.{% endif %}
+{% if last_updated %}{% trans %}Last updated on: {{ last_updated }}.{% endtrans %}{% endif %}

-To download an archive containing all the documents for this version of
-Python in one of various formats, follow one of links in this table.
+{% trans %}To download an archive containing all the documents for this version of
+Python in one of various formats, follow one of links in this table.{% endtrans %}

-Format | Packed as .zip | Packed as .tar.bz2
+{% trans %}Format{% endtrans %} | {% trans %}Packed as .zip{% endtrans %} | {% trans %}Packed as .tar.bz2{% endtrans %}

-PDF (US-Letter paper size) | Download (ca. 17 MiB) | Download (ca. 17 MiB)
+{% trans %}PDF (US-Letter paper size){% endtrans %} | {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} | {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %}

-PDF (A4 paper size) | Download (ca. 17 MiB) | Download (ca. 17 MiB)
+{% trans %}PDF (A4 paper size){% endtrans %} | {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} | {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %}

-HTML | Download (ca. 13 MiB) | Download (ca. 8 MiB)
+{% trans %}HTML{% endtrans %} | {% trans download_size="13" %}Download (ca. {{ download_size }} MiB){% endtrans %} | {% trans download_size="8" %}Download (ca. {{ download_size }} MiB){% endtrans %}

-Plain text | Download (ca. 4 MiB) | Download (ca. 3 MiB)
+{% trans %}Plain text{% endtrans %} | {% trans download_size="4" %}Download (ca. {{ download_size }} MiB){% endtrans %} | {% trans download_size="3" %}Download (ca. {{ download_size }} MiB){% endtrans %}

-Texinfo | Download (ca. 9 MiB) | Download (ca. 7 MiB)
+{% trans %}Texinfo{% endtrans %} | {% trans download_size="9" %}Download (ca. {{ download_size }} MiB){% endtrans %} | {% trans download_size="7" %}Download (ca. {{ download_size }} MiB){% endtrans %}

-EPUB | Download (ca. 6 MiB)
+{% trans %}EPUB{% endtrans %} | {% trans download_size="6" %}Download (ca. {{ download_size }} MiB){% endtrans %}

-These archives contain all the content in the documentation.
+{% trans %}These archives contain all the content in the documentation.{% endtrans %}

-Unpacking
+{% trans %}Unpacking{% endtrans %}

-Unix users should download the .tar.bz2 archives; these are bzipped tar
+{% trans %}Unix users should download the .tar.bz2 archives; these are bzipped tar
 archives and can be handled in the usual way using tar and the bzip2
 program. The Info-ZIP unzip program can be used to handle the ZIP archives
 if desired. The .tar.bz2 archives provide the
-best compression and fastest download times.
+best compression and fastest download times.{% endtrans %}

-Windows users can use the ZIP archives since those are customary on that
-platform. These are created on Unix using the Info-ZIP zip program.
+{% trans %}Windows users can use the ZIP archives since those are customary on that
+platform. These are created on Unix using the Info-ZIP zip program.{% endtrans %}

-Problems
+{% trans %}Problems{% endtrans %}

-If you have comments or suggestions for the Python documentation, please send
-email to docs@python.org.
+{% trans %}If you have comments or suggestions for the Python documentation, please send
+email to docs@python.org.{% endtrans %}
{% endblock %} From 88030861e216ac791725c8784752201d6fe31329 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9n=C3=A9dikt=20Tran?= <10796600+picnixz@users.noreply.github.com> Date: Thu, 1 Aug 2024 12:26:09 +0200 Subject: [PATCH 44/70] gh-122555: Remove removed functions from `Doc/data/refcounts.dat` (#122556) --- Doc/data/refcounts.dat | 10 ---------- Lib/importlib/_bootstrap.py | 1 - 2 files changed, 11 deletions(-) diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index a7d06e076a1b55..ccef104eeefde5 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -364,8 +364,6 @@ PyComplex_RealAsDouble:PyObject*:op:0: PyContext_CheckExact:int::: PyContext_CheckExact:PyObject*:o:0: -PyContext_ClearFreeList:int::: - PyContext_Copy:PyObject*::+1: PyContext_Copy:PyObject*:ctx:0: @@ -1030,8 +1028,6 @@ PyImport_AddModule:const char*:name:: PyImport_AddModuleObject:PyObject*::0:reference borrowed from sys.modules PyImport_AddModuleObject:PyObject*:name:0: -PyImport_Cleanup:void::: - PyImport_ExecCodeModule:PyObject*::+1: PyImport_ExecCodeModule:const char*:name:: PyImport_ExecCodeModule:PyObject*:co:0: @@ -2405,12 +2401,6 @@ PyUnicode_DATA:PyObject*:o:0: PyUnicode_GET_LENGTH:Py_ssize_t::: PyUnicode_GET_LENGTH:PyObject*:o:0: -PyUnicode_GET_SIZE:Py_ssize_t::: -PyUnicode_GET_SIZE:PyObject*:o:0: - -PyUnicode_GET_DATA_SIZE:Py_ssize_t::: -PyUnicode_GET_DATA_SIZE:PyObject*:o:0: - PyUnicode_KIND:int::: PyUnicode_KIND:PyObject*:o:0: diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index de5651f0a7fc36..b70d09b32abce6 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -1241,7 +1241,6 @@ def _find_spec(name, path, target=None): """Find a module's spec.""" meta_path = sys.meta_path if meta_path is None: - # PyImport_Cleanup() is running or has been called. raise ImportError("sys.meta_path is None, Python is likely " "shutting down") From fda6bd842a2b93a501526f1b830eb900d935ac73 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Thu, 1 Aug 2024 14:12:33 +0200 Subject: [PATCH 45/70] Replace PyObject_Del with PyObject_Free (#122453) PyObject_Del() is just a alias to PyObject_Free() kept for backward compatibility. Use directly PyObject_Free() instead. --- Doc/c-api/typeobj.rst | 4 ++-- Modules/_testcapi/gc.c | 2 +- Modules/_testcapimodule.c | 6 +++--- Objects/bytearrayobject.c | 2 +- Objects/bytesobject.c | 2 +- Objects/codeobject.c | 4 ++-- Objects/complexobject.c | 2 +- Objects/fileobject.c | 2 +- Objects/odictobject.c | 2 +- Objects/rangeobject.c | 2 +- Objects/typeobject.c | 4 ++-- Objects/unicodeobject.c | 2 +- Python/optimizer.c | 2 +- 13 files changed, 18 insertions(+), 18 deletions(-) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index 0091e084308245..b7b1418df513c6 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -650,7 +650,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) (doesn't have the :c:macro:`Py_TPFLAGS_BASETYPE` flag bit set), it is permissible to call the object deallocator directly instead of via :c:member:`~PyTypeObject.tp_free`. The object deallocator should be the one used to allocate the - instance; this is normally :c:func:`PyObject_Del` if the instance was allocated + instance; this is normally :c:func:`PyObject_Free` if the instance was allocated using :c:macro:`PyObject_New` or :c:macro:`PyObject_NewVar`, or :c:func:`PyObject_GC_Del` if the instance was allocated using :c:macro:`PyObject_GC_New` or :c:macro:`PyObject_GC_NewVar`. 
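As a rough sketch of the non-GC case described in the paragraph above (not taken from this patch: the ExampleObject type, its value field and the helper Example_new_instance are invented for illustration, while PyObject_New, PyObject_Free and the tp_dealloc/tp_free slots are the standard C API), a plain object allocated with PyObject_New can be released with PyObject_Free directly:

#include <Python.h>

/* Hypothetical object: no GC tracking, no owned references. */
typedef struct {
    PyObject_HEAD
    int value;                      /* invented payload field */
} ExampleObject;

static void
Example_dealloc(PyObject *self)
{
    /* Instances come from PyObject_New (PyObject_Malloc under the hood),
       so they are released with PyObject_Free, which is also what
       PyObject_Del has always done. */
    PyObject_Free(self);
}

static PyTypeObject Example_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_name = "example.Example",   /* invented module.type name */
    .tp_basicsize = sizeof(ExampleObject),
    .tp_flags = Py_TPFLAGS_DEFAULT,
    .tp_dealloc = Example_dealloc,
    .tp_free = PyObject_Free,
};

static PyObject *
Example_new_instance(void)
{
    ExampleObject *op = PyObject_New(ExampleObject, &Example_Type);
    if (op == NULL) {
        return NULL;
    }
    op->value = 0;
    return (PyObject *)op;
}

Because PyObject_Del is only an alias for PyObject_Free, the two slots above behave exactly as they did before this renaming.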
@@ -1954,7 +1954,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) match :c:func:`PyType_GenericAlloc` and the value of the :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit. - For static subtypes, :c:data:`PyBaseObject_Type` uses :c:func:`PyObject_Del`. + For static subtypes, :c:data:`PyBaseObject_Type` uses :c:func:`PyObject_Free`. .. c:member:: inquiry PyTypeObject.tp_is_gc diff --git a/Modules/_testcapi/gc.c b/Modules/_testcapi/gc.c index b472a4185a98af..7e33e0d4861e84 100644 --- a/Modules/_testcapi/gc.c +++ b/Modules/_testcapi/gc.c @@ -72,7 +72,7 @@ without_gc(PyObject *Py_UNUSED(self), PyObject *obj) if (PyType_IS_GC(tp)) { // Don't try this at home, kids: tp->tp_flags -= Py_TPFLAGS_HAVE_GC; - tp->tp_free = PyObject_Del; + tp->tp_free = PyObject_Free; tp->tp_traverse = NULL; tp->tp_clear = NULL; } diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 5ebcfef6143e02..4a371a5ce33ebe 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -289,7 +289,7 @@ static PyTypeObject _HashInheritanceTester_Type = { "hashinheritancetester", /* Name of this type */ sizeof(PyObject), /* Basic object size */ 0, /* Item size for varobject */ - (destructor)PyObject_Del, /* tp_dealloc */ + (destructor)PyObject_Free, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ @@ -3587,7 +3587,7 @@ static PyTypeObject matmulType = { 0, 0, PyType_GenericNew, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; typedef struct { @@ -3699,7 +3699,7 @@ static PyTypeObject awaitType = { 0, 0, awaitObject_new, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index 80679f93cd4c13..a80e4670665a22 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -2426,7 +2426,7 @@ PyTypeObject PyByteArray_Type = { (initproc)bytearray___init__, /* tp_init */ PyType_GenericAlloc, /* tp_alloc */ PyType_GenericNew, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; /*********************** Bytearray Iterator ****************************/ diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 459df6ceacf3a8..e88b199d89f758 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -3066,7 +3066,7 @@ PyTypeObject PyBytes_Type = { 0, /* tp_init */ bytes_alloc, /* tp_alloc */ bytes_new, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; void diff --git a/Objects/codeobject.c b/Objects/codeobject.c index d45ba5ed4a9c06..6423a4214bfa9c 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -1352,7 +1352,7 @@ PyTypeObject _PyLineIterator = { 0, /* tp_init */ 0, /* tp_alloc */ 0, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; static lineiterator * @@ -1443,7 +1443,7 @@ PyTypeObject _PyPositionsIterator = { 0, /* tp_init */ 0, /* tp_alloc */ 0, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; static PyObject* diff --git a/Objects/complexobject.c b/Objects/complexobject.c index 7c8a6bd9dfcd3f..4a8dac6c53f529 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -1247,5 +1247,5 @@ PyTypeObject PyComplex_Type = { 0, /* tp_init */ PyType_GenericAlloc, /* tp_alloc */ actual_complex_new, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; diff --git a/Objects/fileobject.c b/Objects/fileobject.c index bae49d367b65ee..c377d1bb28b56f 100644 --- a/Objects/fileobject.c +++ 
b/Objects/fileobject.c @@ -462,7 +462,7 @@ PyTypeObject PyStdPrinter_Type = { 0, /* tp_init */ PyType_GenericAlloc, /* tp_alloc */ 0, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; diff --git a/Objects/odictobject.c b/Objects/odictobject.c index 30277aa0c23883..a9b801e70c9810 100644 --- a/Objects/odictobject.c +++ b/Objects/odictobject.c @@ -276,7 +276,7 @@ tp_dictoffset (__dict__) - - tp_init __init__ object_init dict_init tp_alloc - PyType_GenericAlloc (repeated) tp_new __new__ object_new dict_new -tp_free - PyObject_Del PyObject_GC_Del +tp_free - PyObject_Free PyObject_GC_Del ================= ================ =================== ================ Relevant Methods diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index 9727b4f47b53a1..1318ce0319d438 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -899,7 +899,7 @@ PyTypeObject PyRangeIter_Type = { sizeof(_PyRangeIterObject), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ - (destructor)PyObject_Del, /* tp_dealloc */ + (destructor)PyObject_Free, /* tp_dealloc */ 0, /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 5b0a466f913495..a2d82e65b6ad9f 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -7456,7 +7456,7 @@ PyTypeObject PyBaseObject_Type = { object_init, /* tp_init */ PyType_GenericAlloc, /* tp_alloc */ object_new, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ }; @@ -8180,7 +8180,7 @@ type_ready_inherit(PyTypeObject *type) /* Sanity check for tp_free. */ if (_PyType_IS_GC(type) && (type->tp_flags & Py_TPFLAGS_BASETYPE) && - (type->tp_free == NULL || type->tp_free == PyObject_Del)) + (type->tp_free == NULL || type->tp_free == PyObject_Free)) { /* This base class needs to call tp_free, but doesn't have * one, or its tp_free is for non-gc'ed objects. 
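The sanity check in type_ready_inherit above rejects a GC-enabled base type whose tp_free is still PyObject_Free. A hedged sketch of the GC counterpart (the ContainerObject type and its item field are invented; the GC calls are the standard C API) shows why such a type routes tp_free to PyObject_GC_Del instead:

#include <Python.h>

/* Hypothetical container that may participate in reference cycles. */
typedef struct {
    PyObject_HEAD
    PyObject *item;                 /* invented field; may form cycles */
} ContainerObject;

static int
Container_traverse(PyObject *self, visitproc visit, void *arg)
{
    Py_VISIT(((ContainerObject *)self)->item);
    return 0;
}

static int
Container_clear(PyObject *self)
{
    Py_CLEAR(((ContainerObject *)self)->item);
    return 0;
}

static void
Container_dealloc(PyObject *self)
{
    PyObject_GC_UnTrack(self);      /* stop GC tracking before tear-down */
    (void)Container_clear(self);
    Py_TYPE(self)->tp_free(self);   /* resolves to PyObject_GC_Del here */
}

static PyTypeObject Container_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_name = "example.Container", /* invented module.type name */
    .tp_basicsize = sizeof(ContainerObject),
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE,
    .tp_traverse = Container_traverse,
    .tp_clear = Container_clear,
    .tp_dealloc = Container_dealloc,
    .tp_alloc = PyType_GenericAlloc,
    .tp_free = PyObject_GC_Del,
};

A subclass built on this base inherits a tp_free that matches its GC allocation, which is exactly the condition the check above enforces.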
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index ffb879a68745b1..12578812a762f6 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -15265,7 +15265,7 @@ PyTypeObject PyUnicode_Type = { 0, /* tp_init */ 0, /* tp_alloc */ unicode_new, /* tp_new */ - PyObject_Del, /* tp_free */ + PyObject_Free, /* tp_free */ .tp_vectorcall = unicode_vectorcall, }; diff --git a/Python/optimizer.c b/Python/optimizer.c index ce8a36575cde1d..9d0381357f2123 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -1374,7 +1374,7 @@ PyTypeObject _PyCounterOptimizer_Type = { .tp_itemsize = 0, .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, .tp_methods = counter_optimizer_methods, - .tp_dealloc = (destructor)PyObject_Del, + .tp_dealloc = (destructor)PyObject_Free, }; PyObject * From df13a1821a90fcfb75eca59aad6af1f0893b1e77 Mon Sep 17 00:00:00 2001 From: Mark Shannon Date: Fri, 2 Aug 2024 00:19:05 +0100 Subject: [PATCH 46/70] GH-118095: Add tier two support for BINARY_SUBSCR_GETITEM (GH-120793) --- Include/internal/pycore_opcode_metadata.h | 3 +- Include/internal/pycore_optimizer.h | 10 + Include/internal/pycore_uop_ids.h | 293 +++++++++++----------- Include/internal/pycore_uop_metadata.h | 8 + Python/bytecodes.c | 42 ++-- Python/executor_cases.c.h | 52 +++- Python/generated_cases.c.h | 80 ++++-- Python/optimizer.c | 12 +- Python/optimizer_analysis.c | 11 +- Python/optimizer_cases.c.h | 13 +- 10 files changed, 317 insertions(+), 207 deletions(-) diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 464d3500890e53..d8e5034268b343 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -505,7 +505,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { case BINARY_SUBSCR_DICT: return 1; case BINARY_SUBSCR_GETITEM: - return 1; + return 0; case BINARY_SUBSCR_LIST_INT: return 1; case BINARY_SUBSCR_STR_INT: @@ -1231,6 +1231,7 @@ _PyOpcode_macro_expansion[256] = { [BINARY_SLICE] = { .nuops = 1, .uops = { { _BINARY_SLICE, 0, 0 } } }, [BINARY_SUBSCR] = { .nuops = 1, .uops = { { _BINARY_SUBSCR, 0, 0 } } }, [BINARY_SUBSCR_DICT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_DICT, 0, 0 } } }, + [BINARY_SUBSCR_GETITEM] = { .nuops = 4, .uops = { { _CHECK_PEP_523, 0, 0 }, { _BINARY_SUBSCR_CHECK_FUNC, 0, 0 }, { _BINARY_SUBSCR_INIT_CALL, 0, 0 }, { _PUSH_FRAME, 0, 0 } } }, [BINARY_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_LIST_INT, 0, 0 } } }, [BINARY_SUBSCR_STR_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_STR_INT, 0, 0 } } }, [BINARY_SUBSCR_TUPLE_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_TUPLE_INT, 0, 0 } } }, diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index bcbb8b73706359..b6da27c067727f 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -259,6 +259,16 @@ PyAPI_FUNC(PyObject *) _Py_uop_symbols_test(PyObject *self, PyObject *ignored); PyAPI_FUNC(int) _PyOptimizer_Optimize(struct _PyInterpreterFrame *frame, _Py_CODEUNIT *start, _PyStackRef *stack_pointer, _PyExecutorObject **exec_ptr); +static inline int is_terminator(const _PyUOpInstruction *uop) +{ + int opcode = uop->opcode; + return ( + opcode == _EXIT_TRACE || + opcode == _JUMP_TO_TOP || + opcode == _DYNAMIC_EXIT + ); +} + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index d6c910255eb87b..27d7f96863fa8c 100644 --- a/Include/internal/pycore_uop_ids.h 
+++ b/Include/internal/pycore_uop_ids.h @@ -22,8 +22,9 @@ extern "C" { #define _BINARY_OP_SUBTRACT_INT 310 #define _BINARY_SLICE BINARY_SLICE #define _BINARY_SUBSCR 311 +#define _BINARY_SUBSCR_CHECK_FUNC 312 #define _BINARY_SUBSCR_DICT BINARY_SUBSCR_DICT -#define _BINARY_SUBSCR_GETITEM BINARY_SUBSCR_GETITEM +#define _BINARY_SUBSCR_INIT_CALL 313 #define _BINARY_SUBSCR_LIST_INT BINARY_SUBSCR_LIST_INT #define _BINARY_SUBSCR_STR_INT BINARY_SUBSCR_STR_INT #define _BINARY_SUBSCR_TUPLE_INT BINARY_SUBSCR_TUPLE_INT @@ -34,10 +35,10 @@ extern "C" { #define _BUILD_STRING BUILD_STRING #define _BUILD_TUPLE BUILD_TUPLE #define _CALL_ALLOC_AND_ENTER_INIT CALL_ALLOC_AND_ENTER_INIT -#define _CALL_BUILTIN_CLASS 312 -#define _CALL_BUILTIN_FAST 313 -#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 314 -#define _CALL_BUILTIN_O 315 +#define _CALL_BUILTIN_CLASS 314 +#define _CALL_BUILTIN_FAST 315 +#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 316 +#define _CALL_BUILTIN_O 317 #define _CALL_FUNCTION_EX CALL_FUNCTION_EX #define _CALL_INTRINSIC_1 CALL_INTRINSIC_1 #define _CALL_INTRINSIC_2 CALL_INTRINSIC_2 @@ -45,38 +46,38 @@ extern "C" { #define _CALL_KW CALL_KW #define _CALL_LEN CALL_LEN #define _CALL_LIST_APPEND CALL_LIST_APPEND -#define _CALL_METHOD_DESCRIPTOR_FAST 316 -#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 317 -#define _CALL_METHOD_DESCRIPTOR_NOARGS 318 -#define _CALL_METHOD_DESCRIPTOR_O 319 -#define _CALL_NON_PY_GENERAL 320 -#define _CALL_STR_1 321 -#define _CALL_TUPLE_1 322 +#define _CALL_METHOD_DESCRIPTOR_FAST 318 +#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 319 +#define _CALL_METHOD_DESCRIPTOR_NOARGS 320 +#define _CALL_METHOD_DESCRIPTOR_O 321 +#define _CALL_NON_PY_GENERAL 322 +#define _CALL_STR_1 323 +#define _CALL_TUPLE_1 324 #define _CALL_TYPE_1 CALL_TYPE_1 -#define _CHECK_ATTR_CLASS 323 -#define _CHECK_ATTR_METHOD_LAZY_DICT 324 -#define _CHECK_ATTR_MODULE 325 -#define _CHECK_ATTR_WITH_HINT 326 -#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 327 +#define _CHECK_ATTR_CLASS 325 +#define _CHECK_ATTR_METHOD_LAZY_DICT 326 +#define _CHECK_ATTR_MODULE 327 +#define _CHECK_ATTR_WITH_HINT 328 +#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 329 #define _CHECK_EG_MATCH CHECK_EG_MATCH #define _CHECK_EXC_MATCH CHECK_EXC_MATCH -#define _CHECK_FUNCTION 328 -#define _CHECK_FUNCTION_EXACT_ARGS 329 -#define _CHECK_FUNCTION_VERSION 330 -#define _CHECK_IS_NOT_PY_CALLABLE 331 -#define _CHECK_MANAGED_OBJECT_HAS_VALUES 332 -#define _CHECK_METHOD_VERSION 333 -#define _CHECK_PEP_523 334 -#define _CHECK_PERIODIC 335 -#define _CHECK_STACK_SPACE 336 -#define _CHECK_STACK_SPACE_OPERAND 337 -#define _CHECK_VALIDITY 338 -#define _CHECK_VALIDITY_AND_SET_IP 339 -#define _COMPARE_OP 340 -#define _COMPARE_OP_FLOAT 341 -#define _COMPARE_OP_INT 342 -#define _COMPARE_OP_STR 343 -#define _CONTAINS_OP 344 +#define _CHECK_FUNCTION 330 +#define _CHECK_FUNCTION_EXACT_ARGS 331 +#define _CHECK_FUNCTION_VERSION 332 +#define _CHECK_IS_NOT_PY_CALLABLE 333 +#define _CHECK_MANAGED_OBJECT_HAS_VALUES 334 +#define _CHECK_METHOD_VERSION 335 +#define _CHECK_PEP_523 336 +#define _CHECK_PERIODIC 337 +#define _CHECK_STACK_SPACE 338 +#define _CHECK_STACK_SPACE_OPERAND 339 +#define _CHECK_VALIDITY 340 +#define _CHECK_VALIDITY_AND_SET_IP 341 +#define _COMPARE_OP 342 +#define _COMPARE_OP_FLOAT 343 +#define _COMPARE_OP_INT 344 +#define _COMPARE_OP_STR 345 +#define _CONTAINS_OP 346 #define _CONTAINS_OP_DICT CONTAINS_OP_DICT #define _CONTAINS_OP_SET CONTAINS_OP_SET #define _CONVERT_VALUE CONVERT_VALUE @@ -88,56 +89,56 @@ extern "C" { #define _DELETE_GLOBAL DELETE_GLOBAL 
#define _DELETE_NAME DELETE_NAME #define _DELETE_SUBSCR DELETE_SUBSCR -#define _DEOPT 345 +#define _DEOPT 347 #define _DICT_MERGE DICT_MERGE #define _DICT_UPDATE DICT_UPDATE -#define _DO_CALL 346 -#define _DYNAMIC_EXIT 347 +#define _DO_CALL 348 +#define _DYNAMIC_EXIT 349 #define _END_SEND END_SEND -#define _ERROR_POP_N 348 +#define _ERROR_POP_N 350 #define _EXIT_INIT_CHECK EXIT_INIT_CHECK -#define _EXPAND_METHOD 349 -#define _FATAL_ERROR 350 +#define _EXPAND_METHOD 351 +#define _FATAL_ERROR 352 #define _FORMAT_SIMPLE FORMAT_SIMPLE #define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC -#define _FOR_ITER 351 -#define _FOR_ITER_GEN_FRAME 352 -#define _FOR_ITER_TIER_TWO 353 +#define _FOR_ITER 353 +#define _FOR_ITER_GEN_FRAME 354 +#define _FOR_ITER_TIER_TWO 355 #define _GET_AITER GET_AITER #define _GET_ANEXT GET_ANEXT #define _GET_AWAITABLE GET_AWAITABLE #define _GET_ITER GET_ITER #define _GET_LEN GET_LEN #define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER -#define _GUARD_BOTH_FLOAT 354 -#define _GUARD_BOTH_INT 355 -#define _GUARD_BOTH_UNICODE 356 -#define _GUARD_BUILTINS_VERSION 357 -#define _GUARD_DORV_NO_DICT 358 -#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 359 -#define _GUARD_GLOBALS_VERSION 360 -#define _GUARD_IS_FALSE_POP 361 -#define _GUARD_IS_NONE_POP 362 -#define _GUARD_IS_NOT_NONE_POP 363 -#define _GUARD_IS_TRUE_POP 364 -#define _GUARD_KEYS_VERSION 365 -#define _GUARD_NOS_FLOAT 366 -#define _GUARD_NOS_INT 367 -#define _GUARD_NOT_EXHAUSTED_LIST 368 -#define _GUARD_NOT_EXHAUSTED_RANGE 369 -#define _GUARD_NOT_EXHAUSTED_TUPLE 370 -#define _GUARD_TOS_FLOAT 371 -#define _GUARD_TOS_INT 372 -#define _GUARD_TYPE_VERSION 373 +#define _GUARD_BOTH_FLOAT 356 +#define _GUARD_BOTH_INT 357 +#define _GUARD_BOTH_UNICODE 358 +#define _GUARD_BUILTINS_VERSION 359 +#define _GUARD_DORV_NO_DICT 360 +#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 361 +#define _GUARD_GLOBALS_VERSION 362 +#define _GUARD_IS_FALSE_POP 363 +#define _GUARD_IS_NONE_POP 364 +#define _GUARD_IS_NOT_NONE_POP 365 +#define _GUARD_IS_TRUE_POP 366 +#define _GUARD_KEYS_VERSION 367 +#define _GUARD_NOS_FLOAT 368 +#define _GUARD_NOS_INT 369 +#define _GUARD_NOT_EXHAUSTED_LIST 370 +#define _GUARD_NOT_EXHAUSTED_RANGE 371 +#define _GUARD_NOT_EXHAUSTED_TUPLE 372 +#define _GUARD_TOS_FLOAT 373 +#define _GUARD_TOS_INT 374 +#define _GUARD_TYPE_VERSION 375 #define _IMPORT_FROM IMPORT_FROM #define _IMPORT_NAME IMPORT_NAME -#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 374 -#define _INIT_CALL_PY_EXACT_ARGS 375 -#define _INIT_CALL_PY_EXACT_ARGS_0 376 -#define _INIT_CALL_PY_EXACT_ARGS_1 377 -#define _INIT_CALL_PY_EXACT_ARGS_2 378 -#define _INIT_CALL_PY_EXACT_ARGS_3 379 -#define _INIT_CALL_PY_EXACT_ARGS_4 380 +#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 376 +#define _INIT_CALL_PY_EXACT_ARGS 377 +#define _INIT_CALL_PY_EXACT_ARGS_0 378 +#define _INIT_CALL_PY_EXACT_ARGS_1 379 +#define _INIT_CALL_PY_EXACT_ARGS_2 380 +#define _INIT_CALL_PY_EXACT_ARGS_3 381 +#define _INIT_CALL_PY_EXACT_ARGS_4 382 #define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX #define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW #define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER @@ -151,65 +152,65 @@ extern "C" { #define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE #define _INSTRUMENTED_POP_JUMP_IF_TRUE INSTRUMENTED_POP_JUMP_IF_TRUE #define _INSTRUMENTED_RESUME INSTRUMENTED_RESUME -#define _INTERNAL_INCREMENT_OPT_COUNTER 381 -#define _IS_NONE 382 +#define _INTERNAL_INCREMENT_OPT_COUNTER 383 +#define _IS_NONE 384 #define _IS_OP IS_OP -#define _ITER_CHECK_LIST 383 
-#define _ITER_CHECK_RANGE 384 -#define _ITER_CHECK_TUPLE 385 -#define _ITER_JUMP_LIST 386 -#define _ITER_JUMP_RANGE 387 -#define _ITER_JUMP_TUPLE 388 -#define _ITER_NEXT_LIST 389 -#define _ITER_NEXT_RANGE 390 -#define _ITER_NEXT_TUPLE 391 -#define _JUMP_TO_TOP 392 +#define _ITER_CHECK_LIST 385 +#define _ITER_CHECK_RANGE 386 +#define _ITER_CHECK_TUPLE 387 +#define _ITER_JUMP_LIST 388 +#define _ITER_JUMP_RANGE 389 +#define _ITER_JUMP_TUPLE 390 +#define _ITER_NEXT_LIST 391 +#define _ITER_NEXT_RANGE 392 +#define _ITER_NEXT_TUPLE 393 +#define _JUMP_TO_TOP 394 #define _LIST_APPEND LIST_APPEND #define _LIST_EXTEND LIST_EXTEND -#define _LOAD_ATTR 393 -#define _LOAD_ATTR_CLASS 394 -#define _LOAD_ATTR_CLASS_0 395 -#define _LOAD_ATTR_CLASS_1 396 +#define _LOAD_ATTR 395 +#define _LOAD_ATTR_CLASS 396 +#define _LOAD_ATTR_CLASS_0 397 +#define _LOAD_ATTR_CLASS_1 398 #define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN -#define _LOAD_ATTR_INSTANCE_VALUE 397 -#define _LOAD_ATTR_INSTANCE_VALUE_0 398 -#define _LOAD_ATTR_INSTANCE_VALUE_1 399 -#define _LOAD_ATTR_METHOD_LAZY_DICT 400 -#define _LOAD_ATTR_METHOD_NO_DICT 401 -#define _LOAD_ATTR_METHOD_WITH_VALUES 402 -#define _LOAD_ATTR_MODULE 403 -#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 404 -#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 405 -#define _LOAD_ATTR_PROPERTY_FRAME 406 -#define _LOAD_ATTR_SLOT 407 -#define _LOAD_ATTR_SLOT_0 408 -#define _LOAD_ATTR_SLOT_1 409 -#define _LOAD_ATTR_WITH_HINT 410 +#define _LOAD_ATTR_INSTANCE_VALUE 399 +#define _LOAD_ATTR_INSTANCE_VALUE_0 400 +#define _LOAD_ATTR_INSTANCE_VALUE_1 401 +#define _LOAD_ATTR_METHOD_LAZY_DICT 402 +#define _LOAD_ATTR_METHOD_NO_DICT 403 +#define _LOAD_ATTR_METHOD_WITH_VALUES 404 +#define _LOAD_ATTR_MODULE 405 +#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 406 +#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 407 +#define _LOAD_ATTR_PROPERTY_FRAME 408 +#define _LOAD_ATTR_SLOT 409 +#define _LOAD_ATTR_SLOT_0 410 +#define _LOAD_ATTR_SLOT_1 411 +#define _LOAD_ATTR_WITH_HINT 412 #define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS #define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT #define _LOAD_CONST LOAD_CONST -#define _LOAD_CONST_INLINE 411 -#define _LOAD_CONST_INLINE_BORROW 412 -#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 413 -#define _LOAD_CONST_INLINE_WITH_NULL 414 +#define _LOAD_CONST_INLINE 413 +#define _LOAD_CONST_INLINE_BORROW 414 +#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 415 +#define _LOAD_CONST_INLINE_WITH_NULL 416 #define _LOAD_DEREF LOAD_DEREF -#define _LOAD_FAST 415 -#define _LOAD_FAST_0 416 -#define _LOAD_FAST_1 417 -#define _LOAD_FAST_2 418 -#define _LOAD_FAST_3 419 -#define _LOAD_FAST_4 420 -#define _LOAD_FAST_5 421 -#define _LOAD_FAST_6 422 -#define _LOAD_FAST_7 423 +#define _LOAD_FAST 417 +#define _LOAD_FAST_0 418 +#define _LOAD_FAST_1 419 +#define _LOAD_FAST_2 420 +#define _LOAD_FAST_3 421 +#define _LOAD_FAST_4 422 +#define _LOAD_FAST_5 423 +#define _LOAD_FAST_6 424 +#define _LOAD_FAST_7 425 #define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR #define _LOAD_FAST_CHECK LOAD_FAST_CHECK #define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST #define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF #define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS -#define _LOAD_GLOBAL 424 -#define _LOAD_GLOBAL_BUILTINS 425 -#define _LOAD_GLOBAL_MODULE 426 +#define _LOAD_GLOBAL 426 +#define _LOAD_GLOBAL_BUILTINS 427 +#define _LOAD_GLOBAL_MODULE 428 #define _LOAD_LOCALS LOAD_LOCALS #define _LOAD_NAME LOAD_NAME #define _LOAD_SPECIAL LOAD_SPECIAL @@ -222,55 +223,55 @@ extern "C" { #define _MATCH_KEYS 
MATCH_KEYS #define _MATCH_MAPPING MATCH_MAPPING #define _MATCH_SEQUENCE MATCH_SEQUENCE -#define _MAYBE_EXPAND_METHOD 427 -#define _MONITOR_CALL 428 +#define _MAYBE_EXPAND_METHOD 429 +#define _MONITOR_CALL 430 #define _NOP NOP #define _POP_EXCEPT POP_EXCEPT -#define _POP_JUMP_IF_FALSE 429 -#define _POP_JUMP_IF_TRUE 430 +#define _POP_JUMP_IF_FALSE 431 +#define _POP_JUMP_IF_TRUE 432 #define _POP_TOP POP_TOP -#define _POP_TOP_LOAD_CONST_INLINE_BORROW 431 +#define _POP_TOP_LOAD_CONST_INLINE_BORROW 433 #define _PUSH_EXC_INFO PUSH_EXC_INFO -#define _PUSH_FRAME 432 +#define _PUSH_FRAME 434 #define _PUSH_NULL PUSH_NULL -#define _PY_FRAME_GENERAL 433 -#define _REPLACE_WITH_TRUE 434 +#define _PY_FRAME_GENERAL 435 +#define _REPLACE_WITH_TRUE 436 #define _RESUME_CHECK RESUME_CHECK #define _RETURN_GENERATOR RETURN_GENERATOR #define _RETURN_VALUE RETURN_VALUE -#define _SAVE_RETURN_OFFSET 435 -#define _SEND 436 -#define _SEND_GEN_FRAME 437 +#define _SAVE_RETURN_OFFSET 437 +#define _SEND 438 +#define _SEND_GEN_FRAME 439 #define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS #define _SET_ADD SET_ADD #define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE #define _SET_UPDATE SET_UPDATE -#define _START_EXECUTOR 438 -#define _STORE_ATTR 439 -#define _STORE_ATTR_INSTANCE_VALUE 440 -#define _STORE_ATTR_SLOT 441 -#define _STORE_ATTR_WITH_HINT 442 +#define _START_EXECUTOR 440 +#define _STORE_ATTR 441 +#define _STORE_ATTR_INSTANCE_VALUE 442 +#define _STORE_ATTR_SLOT 443 +#define _STORE_ATTR_WITH_HINT 444 #define _STORE_DEREF STORE_DEREF -#define _STORE_FAST 443 -#define _STORE_FAST_0 444 -#define _STORE_FAST_1 445 -#define _STORE_FAST_2 446 -#define _STORE_FAST_3 447 -#define _STORE_FAST_4 448 -#define _STORE_FAST_5 449 -#define _STORE_FAST_6 450 -#define _STORE_FAST_7 451 +#define _STORE_FAST 445 +#define _STORE_FAST_0 446 +#define _STORE_FAST_1 447 +#define _STORE_FAST_2 448 +#define _STORE_FAST_3 449 +#define _STORE_FAST_4 450 +#define _STORE_FAST_5 451 +#define _STORE_FAST_6 452 +#define _STORE_FAST_7 453 #define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST #define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST #define _STORE_GLOBAL STORE_GLOBAL #define _STORE_NAME STORE_NAME #define _STORE_SLICE STORE_SLICE -#define _STORE_SUBSCR 452 +#define _STORE_SUBSCR 454 #define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT #define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT #define _SWAP SWAP -#define _TIER2_RESUME_CHECK 453 -#define _TO_BOOL 454 +#define _TIER2_RESUME_CHECK 455 +#define _TO_BOOL 456 #define _TO_BOOL_BOOL TO_BOOL_BOOL #define _TO_BOOL_INT TO_BOOL_INT #define _TO_BOOL_LIST TO_BOOL_LIST @@ -280,13 +281,13 @@ extern "C" { #define _UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT #define _UNPACK_EX UNPACK_EX -#define _UNPACK_SEQUENCE 455 +#define _UNPACK_SEQUENCE 457 #define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST #define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE #define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE #define _WITH_EXCEPT_START WITH_EXCEPT_START #define _YIELD_VALUE YIELD_VALUE -#define MAX_UOP_ID 455 +#define MAX_UOP_ID 457 #ifdef __cplusplus } diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index d23a4e2ea14345..f5c666454dcbef 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -80,6 +80,8 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_BINARY_SUBSCR_STR_INT] = HAS_DEOPT_FLAG, [_BINARY_SUBSCR_TUPLE_INT] = HAS_DEOPT_FLAG, [_BINARY_SUBSCR_DICT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, 
+ [_BINARY_SUBSCR_CHECK_FUNC] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_BINARY_SUBSCR_INIT_CALL] = 0, [_LIST_APPEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG, [_SET_ADD] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -288,7 +290,9 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_BINARY_OP_SUBTRACT_INT] = "_BINARY_OP_SUBTRACT_INT", [_BINARY_SLICE] = "_BINARY_SLICE", [_BINARY_SUBSCR] = "_BINARY_SUBSCR", + [_BINARY_SUBSCR_CHECK_FUNC] = "_BINARY_SUBSCR_CHECK_FUNC", [_BINARY_SUBSCR_DICT] = "_BINARY_SUBSCR_DICT", + [_BINARY_SUBSCR_INIT_CALL] = "_BINARY_SUBSCR_INIT_CALL", [_BINARY_SUBSCR_LIST_INT] = "_BINARY_SUBSCR_LIST_INT", [_BINARY_SUBSCR_STR_INT] = "_BINARY_SUBSCR_STR_INT", [_BINARY_SUBSCR_TUPLE_INT] = "_BINARY_SUBSCR_TUPLE_INT", @@ -652,6 +656,10 @@ int _PyUop_num_popped(int opcode, int oparg) return 2; case _BINARY_SUBSCR_DICT: return 2; + case _BINARY_SUBSCR_CHECK_FUNC: + return 2; + case _BINARY_SUBSCR_INIT_CALL: + return 2; case _LIST_APPEND: return 2 + (oparg-1); case _SET_ADD: diff --git a/Python/bytecodes.c b/Python/bytecodes.c index abfd8039b293a1..414725549d1c20 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -765,32 +765,40 @@ dummy_func( res = PyStackRef_FromPyObjectSteal(res_o); } - inst(BINARY_SUBSCR_GETITEM, (unused/1, container_st, sub_st -- unused)) { - PyObject *container = PyStackRef_AsPyObjectBorrow(container_st); - - DEOPT_IF(tstate->interp->eval_frame); - PyTypeObject *tp = Py_TYPE(container); + op(_BINARY_SUBSCR_CHECK_FUNC, (container, unused -- container, unused)) { + PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE)); PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; - PyObject *cached = ht->_spec_cache.getitem; - DEOPT_IF(cached == NULL); - assert(PyFunction_Check(cached)); - PyFunctionObject *getitem = (PyFunctionObject *)cached; + PyObject *getitem = ht->_spec_cache.getitem; + DEOPT_IF(getitem == NULL); + assert(PyFunction_Check(getitem)); uint32_t cached_version = ht->_spec_cache.getitem_version; - DEOPT_IF(getitem->func_version != cached_version); - PyCodeObject *code = (PyCodeObject *)getitem->func_code; + DEOPT_IF(((PyFunctionObject *)getitem)->func_version != cached_version); + PyCodeObject *code = (PyCodeObject *)PyFunction_GET_CODE(getitem); assert(code->co_argcount == 2); DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize)); STAT_INC(BINARY_SUBSCR, hit); Py_INCREF(getitem); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem, 2); - STACK_SHRINK(2); - new_frame->localsplus[0] = container_st; - new_frame->localsplus[1] = sub_st; - frame->return_offset = (uint16_t)(next_instr - this_instr); - DISPATCH_INLINED(new_frame); } + op(_BINARY_SUBSCR_INIT_CALL, (container, sub -- new_frame: _PyInterpreterFrame* )) { + PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); + PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; + PyObject *getitem = ht->_spec_cache.getitem; + new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2); + SYNC_SP(); + new_frame->localsplus[0] = container; + new_frame->localsplus[1] = sub; + frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + } + + macro(BINARY_SUBSCR_GETITEM) = + unused/1 + // Skip over the counter + _CHECK_PEP_523 + + _BINARY_SUBSCR_CHECK_FUNC + + _BINARY_SUBSCR_INIT_CALL + + _PUSH_FRAME; + inst(LIST_APPEND, (list, unused[oparg-1], v -- list, unused[oparg-1])) { 
ERROR_IF(_PyList_AppendTakeRef((PyListObject *)PyStackRef_AsPyObjectBorrow(list), PyStackRef_AsPyObjectSteal(v)) < 0, error); diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index f0acc3b6ea2ef4..61e1c5cf5c289d 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -966,7 +966,57 @@ break; } - /* _BINARY_SUBSCR_GETITEM is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ + case _BINARY_SUBSCR_CHECK_FUNC: { + _PyStackRef container; + container = stack_pointer[-2]; + PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); + if (!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE)) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; + PyObject *getitem = ht->_spec_cache.getitem; + if (getitem == NULL) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + assert(PyFunction_Check(getitem)); + uint32_t cached_version = ht->_spec_cache.getitem_version; + if (((PyFunctionObject *)getitem)->func_version != cached_version) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + PyCodeObject *code = (PyCodeObject *)PyFunction_GET_CODE(getitem); + assert(code->co_argcount == 2); + if (!_PyThreadState_HasStackSpace(tstate, code->co_framesize)) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + STAT_INC(BINARY_SUBSCR, hit); + Py_INCREF(getitem); + break; + } + + case _BINARY_SUBSCR_INIT_CALL: { + _PyStackRef sub; + _PyStackRef container; + _PyInterpreterFrame *new_frame; + sub = stack_pointer[-1]; + container = stack_pointer[-2]; + PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); + PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; + PyObject *getitem = ht->_spec_cache.getitem; + new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + new_frame->localsplus[0] = container; + new_frame->localsplus[1] = sub; + frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + stack_pointer[0].bits = (uintptr_t)new_frame; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); + break; + } case _LIST_APPEND: { _PyStackRef v; diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index ff8c4eab58f324..4efaf899f23d1a 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -469,37 +469,63 @@ } TARGET(BINARY_SUBSCR_GETITEM) { - _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr; + frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(BINARY_SUBSCR_GETITEM); static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size"); - _PyStackRef container_st; - _PyStackRef sub_st; + _PyStackRef container; + _PyStackRef sub; + _PyInterpreterFrame *new_frame; /* Skip 1 cache entry */ - sub_st = stack_pointer[-1]; - container_st = stack_pointer[-2]; - PyObject *container = PyStackRef_AsPyObjectBorrow(container_st); - DEOPT_IF(tstate->interp->eval_frame, BINARY_SUBSCR); - PyTypeObject *tp = Py_TYPE(container); - DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR); - PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; - PyObject *cached = ht->_spec_cache.getitem; - DEOPT_IF(cached == NULL, BINARY_SUBSCR); - assert(PyFunction_Check(cached)); - PyFunctionObject *getitem = (PyFunctionObject *)cached; - uint32_t cached_version = ht->_spec_cache.getitem_version; - DEOPT_IF(getitem->func_version != cached_version, BINARY_SUBSCR); - PyCodeObject *code = (PyCodeObject *)getitem->func_code; - 
assert(code->co_argcount == 2); - DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); - STAT_INC(BINARY_SUBSCR, hit); - Py_INCREF(getitem); - _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem, 2); - STACK_SHRINK(2); - new_frame->localsplus[0] = container_st; - new_frame->localsplus[1] = sub_st; - frame->return_offset = (uint16_t)(next_instr - this_instr); - DISPATCH_INLINED(new_frame); + // _CHECK_PEP_523 + { + DEOPT_IF(tstate->interp->eval_frame, BINARY_SUBSCR); + } + // _BINARY_SUBSCR_CHECK_FUNC + container = stack_pointer[-2]; + { + PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); + DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR); + PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; + PyObject *getitem = ht->_spec_cache.getitem; + DEOPT_IF(getitem == NULL, BINARY_SUBSCR); + assert(PyFunction_Check(getitem)); + uint32_t cached_version = ht->_spec_cache.getitem_version; + DEOPT_IF(((PyFunctionObject *)getitem)->func_version != cached_version, BINARY_SUBSCR); + PyCodeObject *code = (PyCodeObject *)PyFunction_GET_CODE(getitem); + assert(code->co_argcount == 2); + DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + Py_INCREF(getitem); + } + // _BINARY_SUBSCR_INIT_CALL + sub = stack_pointer[-1]; + { + PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container)); + PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; + PyObject *getitem = ht->_spec_cache.getitem; + new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + new_frame->localsplus[0] = container; + new_frame->localsplus[1] = sub; + frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR); + } + // _PUSH_FRAME + { + // Write it out explicitly because it's subtly different. + // Eventually this should be the only occurrence of this code. 
+ assert(tstate->interp->eval_frame == NULL); + _PyFrame_SetStackPointer(frame, stack_pointer); + new_frame->previous = frame; + CALL_STAT_INC(inlined_py_calls); + frame = tstate->current_frame = new_frame; + tstate->py_recursion_remaining--; + LOAD_SP(); + LOAD_IP(0); + LLTRACE_RESUME_FRAME(); + } + DISPATCH(); } TARGET(BINARY_SUBSCR_LIST_INT) { diff --git a/Python/optimizer.c b/Python/optimizer.c index 9d0381357f2123..e9cbfc5497189c 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -795,6 +795,7 @@ translate_bytecode_to_trace( assert(i + 1 == nuops); if (opcode == FOR_ITER_GEN || opcode == LOAD_ATTR_PROPERTY || + opcode == BINARY_SUBSCR_GETITEM || opcode == SEND_GEN) { DPRINTF(2, "Bailing due to dynamic target\n"); @@ -921,7 +922,9 @@ translate_bytecode_to_trace( 2 * INSTR_IP(initial_instr, code)); return 0; } - if (trace[trace_length-1].opcode != _JUMP_TO_TOP) { + if (!is_terminator(&trace[trace_length-1])) { + /* Allow space for _EXIT_TRACE */ + max_length += 2; ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target); } DPRINTF(1, @@ -1102,7 +1105,7 @@ sanity_check(_PyExecutorObject *executor) CHECK(inst->format == UOP_FORMAT_JUMP); CHECK(inst->error_target < executor->code_size); } - if (opcode == _JUMP_TO_TOP || opcode == _EXIT_TRACE) { + if (is_terminator(inst)) { ended = true; i++; break; @@ -1207,8 +1210,7 @@ int effective_trace_length(_PyUOpInstruction *buffer, int length) if (opcode == _NOP) { nop_count++; } - if (opcode == _EXIT_TRACE || - opcode == _JUMP_TO_TOP) { + if (is_terminator(&buffer[i])) { return i+1-nop_count; } } @@ -1257,7 +1259,7 @@ uop_optimize( else if (oparg < _PyUop_Replication[opcode]) { buffer[pc].opcode = opcode + oparg + 1; } - else if (opcode == _JUMP_TO_TOP || opcode == _EXIT_TRACE) { + else if (is_terminator(&buffer[pc])) { break; } assert(_PyOpcode_uop_name[buffer[pc].opcode]); diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c index 8c866417478128..f7adb44c9e09ef 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -52,14 +52,6 @@ #define DPRINTF(level, ...) 
#endif - - -static inline bool -op_is_end(uint32_t opcode) -{ - return opcode == _EXIT_TRACE || opcode == _JUMP_TO_TOP; -} - static int get_mutations(PyObject* dict) { assert(PyDict_CheckExact(dict)); @@ -288,7 +280,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, prechecked_function_version = (uint32_t)buffer[pc].operand; break; default: - if (op_is_end(opcode)) { + if (is_terminator(inst)) { return 1; } break; @@ -552,6 +544,7 @@ remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size) } case _JUMP_TO_TOP: case _EXIT_TRACE: + case _DYNAMIC_EXIT: return pc + 1; default: { diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index b704c9e77319e4..50aa9728cf2939 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -539,7 +539,18 @@ break; } - /* _BINARY_SUBSCR_GETITEM is not a viable micro-op for tier 2 */ + case _BINARY_SUBSCR_CHECK_FUNC: { + break; + } + + case _BINARY_SUBSCR_INIT_CALL: { + _PyInterpreterFrame *new_frame; + new_frame = sym_new_not_null(ctx); + stack_pointer[-2] = (_Py_UopsSymbol *)new_frame; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + break; + } case _LIST_APPEND: { stack_pointer += -1; From 8234419c32b9890689e26da936882bc1e9ee161f Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Thu, 1 Aug 2024 16:28:25 -0700 Subject: [PATCH 47/70] gh-122562: Remove ste_free and ste_child_free from symtable (#122563) --- Include/internal/pycore_symtable.h | 3 --- Python/symtable.c | 12 ------------ 2 files changed, 15 deletions(-) diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h index d9ed16a3d2321f..b449e8b9dcd91f 100644 --- a/Include/internal/pycore_symtable.h +++ b/Include/internal/pycore_symtable.h @@ -106,9 +106,6 @@ typedef struct _symtable_entry { const char *ste_scope_info; int ste_nested; /* true if block is nested */ - unsigned ste_free : 1; /* true if block has free variables */ - unsigned ste_child_free : 1; /* true if a child block has free vars, - including free refs to globals */ unsigned ste_generator : 1; /* true if namespace is a generator */ unsigned ste_coroutine : 1; /* true if namespace is a coroutine */ unsigned ste_annotations_used : 1; /* true if there are any annotations in this scope */ diff --git a/Python/symtable.c b/Python/symtable.c index 88af37198bfba5..89a0d8a2ccec1a 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -115,7 +115,6 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block, ste->ste_scope_info = NULL; ste->ste_nested = 0; - ste->ste_free = 0; ste->ste_varargs = 0; ste->ste_varkeywords = 0; ste->ste_annotations_used = 0; @@ -125,7 +124,6 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block, (st->st_cur->ste_nested || _PyST_IsFunctionLike(st->st_cur))) ste->ste_nested = 1; - ste->ste_child_free = 0; ste->ste_generator = 0; ste->ste_coroutine = 0; ste->ste_comprehension = NoComprehension; @@ -299,8 +297,6 @@ static void _dump_symtable(PySTEntryObject* ste, PyObject* prefix) comptype, prefix, ste->ste_nested ? " nested" : "", - ste->ste_free ? " free" : "", - ste->ste_child_free ? " child_free" : "", ste->ste_generator ? " generator" : "", ste->ste_coroutine ? " coroutine" : "", ste->ste_varargs ? 
" varargs" : "", @@ -692,7 +688,6 @@ analyze_name(PySTEntryObject *ste, PyObject *scopes, PyObject *name, long flags, return error_at_directive(ste, name); } SET_SCOPE(scopes, name, FREE); - ste->ste_free = 1; return PySet_Add(free, name) >= 0; } if (flags & DEF_BOUND) { @@ -741,7 +736,6 @@ analyze_name(PySTEntryObject *ste, PyObject *scopes, PyObject *name, long flags, } if (contains) { SET_SCOPE(scopes, name, FREE); - ste->ste_free = 1; return PySet_Add(free, name) >= 0; } } @@ -758,8 +752,6 @@ analyze_name(PySTEntryObject *ste, PyObject *scopes, PyObject *name, long flags, return 1; } } - if (ste->ste_nested) - ste->ste_free = 1; SET_SCOPE(scopes, name, GLOBAL_IMPLICIT); return 1; } @@ -842,7 +834,6 @@ inline_comprehension(PySTEntryObject *ste, PySTEntryObject *comp, } } } - comp->ste_free = PySet_Size(comp_free) > 0; if (remove_dunder_class && PyDict_DelItemString(comp->ste_symbols, "__class__") < 0) { return 0; } @@ -1202,9 +1193,6 @@ analyze_block(PySTEntryObject *ste, PyObject *bound, PyObject *free, if (!temp) goto error; Py_DECREF(temp); - /* Check if any children have free variables */ - if (entry->ste_free || entry->ste_child_free) - ste->ste_child_free = 1; } /* Splice children of inlined comprehensions into our children list */ From dbdbef3668293abdceac2b8a7b3e4615e6bde143 Mon Sep 17 00:00:00 2001 From: Jonathon Vandezande Date: Thu, 1 Aug 2024 21:31:37 -0400 Subject: [PATCH 48/70] Fixes typo in idlelib/idle_test/example_stub.pyi (#122520) --------- Co-authored-by: Terry Jan Reedy --- Lib/idlelib/idle_test/example_stub.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Lib/idlelib/idle_test/example_stub.pyi b/Lib/idlelib/idle_test/example_stub.pyi index 17b58010a9d8de..abcdbc17529974 100644 --- a/Lib/idlelib/idle_test/example_stub.pyi +++ b/Lib/idlelib/idle_test/example_stub.pyi @@ -1,4 +1,4 @@ -" Example to test recognition of .pyi file as Python source code. +# An example file to test recognition of a .pyi file as Python source code. class Example: def method(self, argument1: str, argument2: list[int]) -> None: ... From 5a7f7c48644baf82988f30bcb43e03dcfceb75dd Mon Sep 17 00:00:00 2001 From: John Riggles Date: Thu, 1 Aug 2024 23:02:43 -0400 Subject: [PATCH 49/70] gh-120083: Add IDLE Hovertip foreground color needed for recent macOS (#120605) On recent versions of macOS (sometime between Catalina and Sonoma 14.5), the default Hovertip foreground color changed from black to white, thereby matching the background. This might be a matter of matching the white foreground of the dark-mode text. The unreadable result is shown here (#120083 (comment)). The foreground and background colors were made parameters so we can pass different colors for future additional hovertips in IDLE. --------- Co-authored-by: Terry Jan Reedy --- Lib/idlelib/News3.txt | 4 ++++ Lib/idlelib/tooltip.py | 8 ++++++-- .../IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst | 1 + 3 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 Misc/NEWS.d/next/IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt index a7a92e97b6c244..37ff93f9866e3c 100644 --- a/Lib/idlelib/News3.txt +++ b/Lib/idlelib/News3.txt @@ -4,6 +4,10 @@ Released on 2024-10-xx ========================= +gh-120083: Add explicit black IDLE Hovertip foreground color needed for +recent macOS. Fixes Sonoma showing unreadable white on pale yellow. +Patch by John Riggles. 
+ gh-122482: Change About IDLE to direct users to discuss.python.org instead of the now unused idle-dev email and mailing list. diff --git a/Lib/idlelib/tooltip.py b/Lib/idlelib/tooltip.py index 3983690dd41177..df5b1fe1dcfb08 100644 --- a/Lib/idlelib/tooltip.py +++ b/Lib/idlelib/tooltip.py @@ -144,7 +144,8 @@ def hidetip(self): class Hovertip(OnHoverTooltipBase): "A tooltip that pops up when a mouse hovers over an anchor widget." - def __init__(self, anchor_widget, text, hover_delay=1000): + def __init__(self, anchor_widget, text, hover_delay=1000, + foreground="#000000", background="#ffffe0"): """Create a text tooltip with a mouse hover delay. anchor_widget: the widget next to which the tooltip will be shown @@ -156,10 +157,13 @@ def __init__(self, anchor_widget, text, hover_delay=1000): """ super().__init__(anchor_widget, hover_delay=hover_delay) self.text = text + self.foreground = foreground + self.background = background def showcontents(self): label = Label(self.tipwindow, text=self.text, justify=LEFT, - background="#ffffe0", relief=SOLID, borderwidth=1) + relief=SOLID, borderwidth=1, + foreground=self.foreground, background=self.background) label.pack() diff --git a/Misc/NEWS.d/next/IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst b/Misc/NEWS.d/next/IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst new file mode 100644 index 00000000000000..643c2bb38c6e1f --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst @@ -0,0 +1 @@ +Add explicit black IDLE Hovertip foreground color needed for recent macOS. Fixes Sonoma showing unreadable white on pale yellow. Patch by John Riggles. From d57f8a9f76e75384ec997686c2a826b1dc3c69c4 Mon Sep 17 00:00:00 2001 From: Damien <81557462+Damien-Chen@users.noreply.github.com> Date: Fri, 2 Aug 2024 14:09:27 +0800 Subject: [PATCH 50/70] gh-122544: Change OS image in readthedocs.yml to ubuntu-24.04 (#122568) --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index d0d0c3b93ed207..a57de00544e4e3 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -8,7 +8,7 @@ sphinx: configuration: Doc/conf.py build: - os: ubuntu-22.04 + os: ubuntu-24.04 tools: python: "3" From 03b88522f5e847773845b0fac90fd06d04937a65 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Fri, 2 Aug 2024 13:12:19 +0300 Subject: [PATCH 51/70] gh-122188: Remove _imp.pyc_magic_number (GH-122503) _imp.pyc_magic_number_token should be enough. 
--- Lib/importlib/_bootstrap_external.py | 2 +- Lib/test/test_import/__init__.py | 10 ++++++---- Python/import.c | 6 +----- 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 4d154dc4c25edc..5bbcb376a4a6b3 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -221,7 +221,7 @@ def _write_atomic(path, data, mode=0o666): _code_type = type(_write_atomic.__code__) -MAGIC_NUMBER = (_imp.pyc_magic_number).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = _imp.pyc_magic_number_token.to_bytes(4, 'little') _PYCACHE = '__pycache__' _OPT = 'opt-' diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index 56c6ffe93fce37..fd778ec216cc98 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -3116,10 +3116,12 @@ def test_pyimport_addmodule_create(self): @cpython_only class TestMagicNumber(unittest.TestCase): def test_magic_number_endianness(self): - magic_number = (_imp.pyc_magic_number).to_bytes(2, 'little') + b'\r\n' - raw_magic_number = int.from_bytes(magic_number, 'little') - - self.assertEqual(raw_magic_number, _imp.pyc_magic_number_token) + magic_number_bytes = _imp.pyc_magic_number_token.to_bytes(4, 'little') + self.assertEqual(magic_number_bytes[2:], b'\r\n') + # Starting with Python 3.11, Python 3.n starts with magic number 2900+50n. + magic_number = int.from_bytes(magic_number_bytes[:2], 'little') + start = 2900 + sys.version_info.minor * 50 + self.assertIn(magic_number, range(start, start + 50)) if __name__ == '__main__': diff --git a/Python/import.c b/Python/import.c index 540874a0f0414f..f4c0d544fbdefa 100644 --- a/Python/import.c +++ b/Python/import.c @@ -6,7 +6,7 @@ #include "pycore_import.h" // _PyImport_BootstrapImp() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_interp.h" // struct _import_runtime_state -#include "pycore_magic_number.h" // PYC_MAGIC_NUMBER +#include "pycore_magic_number.h" // PYC_MAGIC_NUMBER_TOKEN #include "pycore_namespace.h" // _PyNamespace_Type #include "pycore_object.h" // _Py_SetImmortal() #include "pycore_pyerrors.h" // _PyErr_SetString() @@ -4810,10 +4810,6 @@ imp_module_exec(PyObject *module) return -1; } - if (PyModule_AddIntConstant(module, "pyc_magic_number", PYC_MAGIC_NUMBER) < 0) { - return -1; - } - if (PyModule_AddIntConstant( module, "pyc_magic_number_token", PYC_MAGIC_NUMBER_TOKEN) < 0) { From addbb73927f55855dfcc62fd47b0018de8a814ed Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Fri, 2 Aug 2024 12:13:33 +0200 Subject: [PATCH 52/70] Update PyObject_Del() documentation (#122597) Replace PyMem_Del() with PyMem_Free(). --- Doc/c-api/allocation.rst | 7 +------ Doc/c-api/memory.rst | 6 +++--- Modules/_sre/sre.c | 2 +- Modules/_testcapi/heaptype.c | 16 ++++++++-------- 4 files changed, 13 insertions(+), 18 deletions(-) diff --git a/Doc/c-api/allocation.rst b/Doc/c-api/allocation.rst index b3609c233156b6..0d53b18ea87d5e 100644 --- a/Doc/c-api/allocation.rst +++ b/Doc/c-api/allocation.rst @@ -54,12 +54,7 @@ Allocating Objects on the Heap .. c:function:: void PyObject_Del(void *op) - Releases memory allocated to an object using :c:macro:`PyObject_New` or - :c:macro:`PyObject_NewVar`. This is normally called from the - :c:member:`~PyTypeObject.tp_dealloc` handler specified in the object's type. The fields of - the object should not be accessed after this call as the memory is no - longer a valid Python object. - + Same as :c:func:`PyObject_Free`. 
.. c:var:: PyObject _Py_NoneStruct diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst index 9da09a21607f61..4ecc998b37e598 100644 --- a/Doc/c-api/memory.rst +++ b/Doc/c-api/memory.rst @@ -734,7 +734,7 @@ The same code using the type-oriented function set:: return PyErr_NoMemory(); /* ...Do some I/O operation involving buf... */ res = PyBytes_FromString(buf); - PyMem_Del(buf); /* allocated with PyMem_New */ + PyMem_Free(buf); /* allocated with PyMem_New */ return res; Note that in the two examples above, the buffer is always manipulated via @@ -750,11 +750,11 @@ allocators operating on different heaps. :: ... PyMem_Del(buf3); /* Wrong -- should be PyMem_Free() */ free(buf2); /* Right -- allocated via malloc() */ - free(buf1); /* Fatal -- should be PyMem_Del() */ + free(buf1); /* Fatal -- should be PyMem_Free() */ In addition to the functions aimed at handling raw memory blocks from the Python heap, objects in Python are allocated and released with :c:macro:`PyObject_New`, -:c:macro:`PyObject_NewVar` and :c:func:`PyObject_Del`. +:c:macro:`PyObject_NewVar` and :c:func:`PyObject_Free`. These will be explained in the next chapter on defining and implementing new object types in C. diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index 0a888af31b0497..01420d1a10b1cf 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -530,7 +530,7 @@ state_fini(SRE_STATE* state) PyBuffer_Release(&state->buffer); Py_XDECREF(state->string); data_stack_dealloc(state); - /* See above PyMem_Del for why we explicitly cast here. */ + /* See above PyMem_Free() for why we explicitly cast here. */ PyMem_Free((void*) state->mark); state->mark = NULL; } diff --git a/Modules/_testcapi/heaptype.c b/Modules/_testcapi/heaptype.c index 4526583a8059d9..b45b890b88d81f 100644 --- a/Modules/_testcapi/heaptype.c +++ b/Modules/_testcapi/heaptype.c @@ -269,16 +269,16 @@ test_type_from_ephemeral_spec(PyObject *self, PyObject *Py_UNUSED(ignored)) // (Explicitly overwrite memory before freeing, // so bugs show themselves even without the debug allocator's help.) memset(spec, 0xdd, sizeof(PyType_Spec)); - PyMem_Del(spec); + PyMem_Free(spec); spec = NULL; memset(name, 0xdd, sizeof(NAME)); - PyMem_Del(name); + PyMem_Free(name); name = NULL; memset(doc, 0xdd, sizeof(DOC)); - PyMem_Del(doc); + PyMem_Free(doc); doc = NULL; memset(slots, 0xdd, 3 * sizeof(PyType_Slot)); - PyMem_Del(slots); + PyMem_Free(slots); slots = NULL; /* check that everything works */ @@ -304,10 +304,10 @@ test_type_from_ephemeral_spec(PyObject *self, PyObject *Py_UNUSED(ignored)) result = Py_NewRef(Py_None); finally: - PyMem_Del(spec); - PyMem_Del(name); - PyMem_Del(doc); - PyMem_Del(slots); + PyMem_Free(spec); + PyMem_Free(name); + PyMem_Free(doc); + PyMem_Free(slots); Py_XDECREF(class); Py_XDECREF(instance); Py_XDECREF(obj); From fb864c76cd5e450e789a7b4095832e118cc49a39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9n=C3=A9dikt=20Tran?= <10796600+picnixz@users.noreply.github.com> Date: Fri, 2 Aug 2024 13:16:32 +0200 Subject: [PATCH 53/70] gh-121723: Relax constraints on queue objects for `logging.handlers.QueueHandler`. 
(GH-122154) --- Doc/library/logging.config.rst | 9 +- Lib/logging/config.py | 55 ++++----- Lib/test/test_logging.py | 107 ++++++++++++++---- ...-07-23-10-59-38.gh-issue-121723.iJEf7e.rst | 3 + 4 files changed, 124 insertions(+), 50 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2024-07-23-10-59-38.gh-issue-121723.iJEf7e.rst diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst index dfbf0b1cf2f9ff..0ddbc1a5f88048 100644 --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -753,9 +753,12 @@ The ``queue`` and ``listener`` keys are optional. If the ``queue`` key is present, the corresponding value can be one of the following: -* An actual instance of :class:`queue.Queue` or a subclass thereof. This is of course - only possible if you are constructing or modifying the configuration dictionary in - code. +* An object implementing the :class:`queue.Queue` public API. For instance, + this may be an actual instance of :class:`queue.Queue` or a subclass thereof, + or a proxy obtained by :meth:`multiprocessing.managers.SyncManager.Queue`. + + This is of course only possible if you are constructing or modifying + the configuration dictionary in code. * A string that resolves to a callable which, when called with no arguments, returns the :class:`queue.Queue` instance to use. That callable could be a diff --git a/Lib/logging/config.py b/Lib/logging/config.py index 95e129ae988c24..3781cb1aeb9ae2 100644 --- a/Lib/logging/config.py +++ b/Lib/logging/config.py @@ -497,6 +497,33 @@ def as_tuple(self, value): value = tuple(value) return value +def _is_queue_like_object(obj): + """Check that *obj* implements the Queue API.""" + if isinstance(obj, queue.Queue): + return True + # defer importing multiprocessing as much as possible + from multiprocessing.queues import Queue as MPQueue + if isinstance(obj, MPQueue): + return True + # Depending on the multiprocessing start context, we cannot create + # a multiprocessing.managers.BaseManager instance 'mm' to get the + # runtime type of mm.Queue() or mm.JoinableQueue() (see gh-119819). + # + # Since we only need an object implementing the Queue API, we only + # do a protocol check, but we do not use typing.runtime_checkable() + # and typing.Protocol to reduce import time (see gh-121723). + # + # Ideally, we would have wanted to simply use strict type checking + # instead of a protocol-based type checking since the latter does + # not check the method signatures. + queue_interface = [ + 'empty', 'full', 'get', 'get_nowait', + 'put', 'put_nowait', 'join', 'qsize', + 'task_done', + ] + return all(callable(getattr(obj, method, None)) + for method in queue_interface) + class DictConfigurator(BaseConfigurator): """ Configure logging using a dictionary-like object to describe the @@ -791,32 +818,8 @@ def configure_handler(self, config): if '()' not in qspec: raise TypeError('Invalid queue specifier %r' % qspec) config['queue'] = self.configure_custom(dict(qspec)) - else: - from multiprocessing.queues import Queue as MPQueue - - if not isinstance(qspec, (queue.Queue, MPQueue)): - # Safely check if 'qspec' is an instance of Manager.Queue - # / Manager.JoinableQueue - - from multiprocessing import Manager as MM - from multiprocessing.managers import BaseProxy - - # if it's not an instance of BaseProxy, it also can't be - # an instance of Manager.Queue / Manager.JoinableQueue - if isinstance(qspec, BaseProxy): - # Sometimes manager or queue creation might fail - # (e.g. see issue gh-120868). 
In that case, any - # exception during the creation of these queues will - # propagate up to the caller and be wrapped in a - # `ValueError`, whose cause will indicate the details of - # the failure. - mm = MM() - proxy_queue = mm.Queue() - proxy_joinable_queue = mm.JoinableQueue() - if not isinstance(qspec, (type(proxy_queue), type(proxy_joinable_queue))): - raise TypeError('Invalid queue specifier %r' % qspec) - else: - raise TypeError('Invalid queue specifier %r' % qspec) + elif not _is_queue_like_object(qspec): + raise TypeError('Invalid queue specifier %r' % qspec) if 'listener' in config: lspec = config['listener'] diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 6d688d4b81bbf4..49523756e115c6 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -2368,6 +2368,26 @@ class CustomListener(logging.handlers.QueueListener): class CustomQueue(queue.Queue): pass +class CustomQueueProtocol: + def __init__(self, maxsize=0): + self.queue = queue.Queue(maxsize) + + def __getattr__(self, attribute): + queue = object.__getattribute__(self, 'queue') + return getattr(queue, attribute) + +class CustomQueueFakeProtocol(CustomQueueProtocol): + # An object implementing the Queue API (incorrect signatures). + # The object will be considered a valid queue class since we + # do not check the signatures (only callability of methods) + # but will NOT be usable in production since a TypeError will + # be raised due to a missing argument. + def empty(self, x): + pass + +class CustomQueueWrongProtocol(CustomQueueProtocol): + empty = None + def queueMaker(): return queue.Queue() @@ -3901,18 +3921,16 @@ def do_queuehandler_configuration(self, qspec, lspec): @threading_helper.requires_working_threading() @support.requires_subprocess() def test_config_queue_handler(self): - q = CustomQueue() - dq = { - '()': __name__ + '.CustomQueue', - 'maxsize': 10 - } + qs = [CustomQueue(), CustomQueueProtocol()] + dqs = [{'()': f'{__name__}.{cls}', 'maxsize': 10} + for cls in ['CustomQueue', 'CustomQueueProtocol']] dl = { '()': __name__ + '.listenerMaker', 'arg1': None, 'arg2': None, 'respect_handler_level': True } - qvalues = (None, __name__ + '.queueMaker', __name__ + '.CustomQueue', dq, q) + qvalues = (None, __name__ + '.queueMaker', __name__ + '.CustomQueue', *dqs, *qs) lvalues = (None, __name__ + '.CustomListener', dl, CustomListener) for qspec, lspec in itertools.product(qvalues, lvalues): self.do_queuehandler_configuration(qspec, lspec) @@ -3932,15 +3950,21 @@ def test_config_queue_handler(self): @support.requires_subprocess() @patch("multiprocessing.Manager") def test_config_queue_handler_does_not_create_multiprocessing_manager(self, manager): - # gh-120868 + # gh-120868, gh-121723 from multiprocessing import Queue as MQ q1 = {"()": "queue.Queue", "maxsize": -1} q2 = MQ() q3 = queue.Queue() - - for qspec in (q1, q2, q3): + # CustomQueueFakeProtocol passes the checks but will not be usable + # since the signatures are incompatible. Checking the Queue API + # without testing the type of the actual queue is a trade-off + # between usability and the work we need to do in order to safely + # check that the queue object correctly implements the API. 
+ q4 = CustomQueueFakeProtocol() + + for qspec in (q1, q2, q3, q4): self.apply_config( { "version": 1, @@ -3956,21 +3980,62 @@ def test_config_queue_handler_does_not_create_multiprocessing_manager(self, mana @patch("multiprocessing.Manager") def test_config_queue_handler_invalid_config_does_not_create_multiprocessing_manager(self, manager): - # gh-120868 + # gh-120868, gh-121723 - with self.assertRaises(ValueError): - self.apply_config( - { - "version": 1, - "handlers": { - "queue_listener": { - "class": "logging.handlers.QueueHandler", - "queue": object(), + for qspec in [object(), CustomQueueWrongProtocol()]: + with self.assertRaises(ValueError): + self.apply_config( + { + "version": 1, + "handlers": { + "queue_listener": { + "class": "logging.handlers.QueueHandler", + "queue": qspec, + }, }, - }, + } + ) + manager.assert_not_called() + + @skip_if_tsan_fork + @support.requires_subprocess() + @unittest.skipUnless(support.Py_DEBUG, "requires a debug build for testing" + "assertions in multiprocessing") + def test_config_queue_handler_multiprocessing_context(self): + # regression test for gh-121723 + if support.MS_WINDOWS: + start_methods = ['spawn'] + else: + start_methods = ['spawn', 'fork', 'forkserver'] + for start_method in start_methods: + with self.subTest(start_method=start_method): + ctx = multiprocessing.get_context(start_method) + with ctx.Manager() as manager: + q = manager.Queue() + records = [] + # use 1 process and 1 task per child to put 1 record + with ctx.Pool(1, initializer=self._mpinit_issue121723, + initargs=(q, "text"), maxtasksperchild=1): + records.append(q.get(timeout=60)) + self.assertTrue(q.empty()) + self.assertEqual(len(records), 1) + + @staticmethod + def _mpinit_issue121723(qspec, message_to_log): + # static method for pickling support + logging.config.dictConfig({ + 'version': 1, + 'disable_existing_loggers': True, + 'handlers': { + 'log_to_parent': { + 'class': 'logging.handlers.QueueHandler', + 'queue': qspec } - ) - manager.assert_not_called() + }, + 'root': {'handlers': ['log_to_parent'], 'level': 'DEBUG'} + }) + # log a message (this creates a record put in the queue) + logging.getLogger().info(message_to_log) @skip_if_tsan_fork @support.requires_subprocess() diff --git a/Misc/NEWS.d/next/Library/2024-07-23-10-59-38.gh-issue-121723.iJEf7e.rst b/Misc/NEWS.d/next/Library/2024-07-23-10-59-38.gh-issue-121723.iJEf7e.rst new file mode 100644 index 00000000000000..cabb4024fb10f1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-23-10-59-38.gh-issue-121723.iJEf7e.rst @@ -0,0 +1,3 @@ +Make :func:`logging.config.dictConfig` accept any object implementing the +Queue public API. See the :ref:`queue configuration ` +section for details. Patch by Bénédikt Tran. 
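
A minimal usage sketch of the behaviour enabled by the patch above (gh-121723), not taken verbatim from it: logging.config.dictConfig() now accepts any object implementing the queue.Queue public API for a QueueHandler's "queue" key, including a multiprocessing SyncManager Queue proxy. The snippet mirrors the new test_config_queue_handler_multiprocessing_context test; the helper name build_config and the handler name "queue_handler" are illustrative, not taken from the patch.

import logging
import logging.config
import multiprocessing
import queue

def build_config(q):
    # After gh-121723, 'queue' may be any queue-like object, not only queue.Queue.
    return {
        "version": 1,
        "handlers": {
            "queue_handler": {
                "class": "logging.handlers.QueueHandler",
                "queue": q,
            },
        },
        "root": {"handlers": ["queue_handler"], "level": "INFO"},
    }

if __name__ == "__main__":
    # A plain queue.Queue still works as before.
    logging.config.dictConfig(build_config(queue.Queue()))
    # A SyncManager proxy queue is now accepted as well (previously rejected).
    with multiprocessing.Manager() as manager:
        logging.config.dictConfig(build_config(manager.Queue()))
        logging.getLogger().info("record routed through the manager queue")
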
From b5e6fb39a246bf7ee470d58632cdf588bb9d0298 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Fri, 2 Aug 2024 09:32:08 -0400 Subject: [PATCH 54/70] gh-120974: Make asyncio `swap_current_task` safe in free-threaded build (#122317) * gh-120974: Make asyncio `swap_current_task` safe in free-threaded build --- Include/internal/pycore_dict.h | 7 ++++- Modules/_asynciomodule.c | 37 ++++++++++++++--------- Objects/dictobject.c | 54 ++++++++++++++++++++++++---------- 3 files changed, 67 insertions(+), 31 deletions(-) diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index fc304aca7fea10..a84246ee34efff 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -108,8 +108,13 @@ PyAPI_FUNC(PyObject *)_PyDict_LoadGlobal(PyDictObject *, PyDictObject *, PyObjec /* Consumes references to key and value */ PyAPI_FUNC(int) _PyDict_SetItem_Take2(PyDictObject *op, PyObject *key, PyObject *value); extern int _PyDict_SetItem_LockHeld(PyDictObject *dict, PyObject *name, PyObject *value); -extern int _PyDict_GetItemRef_Unicode_LockHeld(PyDictObject *op, PyObject *key, PyObject **result); +// Export for '_asyncio' shared extension +PyAPI_FUNC(int) _PyDict_SetItem_KnownHash_LockHeld(PyDictObject *mp, PyObject *key, + PyObject *value, Py_hash_t hash); +// Export for '_asyncio' shared extension +PyAPI_FUNC(int) _PyDict_GetItemRef_KnownHash_LockHeld(PyDictObject *op, PyObject *key, Py_hash_t hash, PyObject **result); extern int _PyDict_GetItemRef_KnownHash(PyDictObject *op, PyObject *key, Py_hash_t hash, PyObject **result); +extern int _PyDict_GetItemRef_Unicode_LockHeld(PyDictObject *op, PyObject *key, PyObject **result); extern int _PyObjectDict_SetItem(PyTypeObject *tp, PyObject *obj, PyObject **dictptr, PyObject *name, PyObject *value); extern int _PyDict_Pop_KnownHash( diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 873c17cd78709d..c6eb43f044fdbd 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -2026,6 +2026,24 @@ leave_task(asyncio_state *state, PyObject *loop, PyObject *task) return res; } +static PyObject * +swap_current_task_lock_held(PyDictObject *current_tasks, PyObject *loop, + Py_hash_t hash, PyObject *task) +{ + PyObject *prev_task; + if (_PyDict_GetItemRef_KnownHash_LockHeld(current_tasks, loop, hash, &prev_task) < 0) { + return NULL; + } + if (_PyDict_SetItem_KnownHash_LockHeld(current_tasks, loop, task, hash) < 0) { + Py_XDECREF(prev_task); + return NULL; + } + if (prev_task == NULL) { + Py_RETURN_NONE; + } + return prev_task; +} + static PyObject * swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) { @@ -2041,24 +2059,15 @@ swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) return prev_task; } - Py_hash_t hash; - hash = PyObject_Hash(loop); + Py_hash_t hash = PyObject_Hash(loop); if (hash == -1) { return NULL; } - prev_task = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash); - if (prev_task == NULL) { - if (PyErr_Occurred()) { - return NULL; - } - prev_task = Py_None; - } - Py_INCREF(prev_task); - if (_PyDict_SetItem_KnownHash(state->current_tasks, loop, task, hash) == -1) { - Py_DECREF(prev_task); - return NULL; - } + PyDictObject *current_tasks = (PyDictObject *)state->current_tasks; + Py_BEGIN_CRITICAL_SECTION(current_tasks); + prev_task = swap_current_task_lock_held(current_tasks, loop, hash, task); + Py_END_CRITICAL_SECTION(); return prev_task; } diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 6a16a04102a6c0..3e9f982ae070a3 100644 
--- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2216,6 +2216,29 @@ _PyDict_GetItem_KnownHash(PyObject *op, PyObject *key, Py_hash_t hash) return value; // borrowed reference } +/* Gets an item and provides a new reference if the value is present. + * Returns 1 if the key is present, 0 if the key is missing, and -1 if an + * exception occurred. +*/ +int +_PyDict_GetItemRef_KnownHash_LockHeld(PyDictObject *op, PyObject *key, + Py_hash_t hash, PyObject **result) +{ + PyObject *value; + Py_ssize_t ix = _Py_dict_lookup(op, key, hash, &value); + assert(ix >= 0 || value == NULL); + if (ix == DKIX_ERROR) { + *result = NULL; + return -1; + } + if (value == NULL) { + *result = NULL; + return 0; // missing key + } + *result = Py_NewRef(value); + return 1; // key is present +} + /* Gets an item and provides a new reference if the value is present. * Returns 1 if the key is present, 0 if the key is missing, and -1 if an * exception occurred. @@ -2460,11 +2483,21 @@ setitem_lock_held(PyDictObject *mp, PyObject *key, PyObject *value) int -_PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value, - Py_hash_t hash) +_PyDict_SetItem_KnownHash_LockHeld(PyDictObject *mp, PyObject *key, PyObject *value, + Py_hash_t hash) { - PyDictObject *mp; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (mp->ma_keys == Py_EMPTY_KEYS) { + return insert_to_emptydict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value)); + } + /* insertdict() handles any resizing that might be necessary */ + return insertdict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value)); +} +int +_PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value, + Py_hash_t hash) +{ if (!PyDict_Check(op)) { PyErr_BadInternalCall(); return -1; @@ -2472,21 +2505,10 @@ _PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value, assert(key); assert(value); assert(hash != -1); - mp = (PyDictObject *)op; int res; - PyInterpreterState *interp = _PyInterpreterState_GET(); - - Py_BEGIN_CRITICAL_SECTION(mp); - - if (mp->ma_keys == Py_EMPTY_KEYS) { - res = insert_to_emptydict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value)); - } - else { - /* insertdict() handles any resizing that might be necessary */ - res = insertdict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value)); - } - + Py_BEGIN_CRITICAL_SECTION(op); + res = _PyDict_SetItem_KnownHash_LockHeld((PyDictObject *)op, key, value, hash); Py_END_CRITICAL_SECTION(); return res; } From 9fc1c992d6fcea0b7558c581846eef6bdd811f6c Mon Sep 17 00:00:00 2001 From: neonene <53406459+neonene@users.noreply.github.com> Date: Fri, 2 Aug 2024 22:36:20 +0900 Subject: [PATCH 55/70] gh-122334: Fix crash when importing ssl after re-initialization (#122481) * Fix crash when importing ssl after re-initialization --- Lib/test/test_embed.py | 19 +++++++++++++++++++ ...-07-30-21-29-30.gh-issue-122334.LeoE1x.rst | 1 + Python/getargs.c | 13 +++++++++++++ 3 files changed, 33 insertions(+) create mode 100644 Misc/NEWS.d/next/Library/2024-07-30-21-29-30.gh-issue-122334.LeoE1x.rst diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 9602f1a92c37c8..ab112d6be85b46 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -461,6 +461,25 @@ def add(cls, slot, own): self.assertEqual(result, {}) self.assertEqual(out, '') + def test_getargs_reset_static_parser(self): + # Test _PyArg_Parser initializations via _PyArg_UnpackKeywords() + # https://github.com/python/cpython/issues/122334 + code = textwrap.dedent(""" + import _ssl + 
_ssl.txt2obj(txt='1.3') + print('1') + + import _queue + _queue.SimpleQueue().put_nowait(item=None) + print('2') + + import _zoneinfo + _zoneinfo.ZoneInfo.clear_cache(only_keys=['Foo/Bar']) + print('3') + """) + out, err = self.run_embedded_interpreter("test_repeated_init_exec", code) + self.assertEqual(out, '1\n2\n3\n' * INIT_LOOPS) + @unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi") class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): diff --git a/Misc/NEWS.d/next/Library/2024-07-30-21-29-30.gh-issue-122334.LeoE1x.rst b/Misc/NEWS.d/next/Library/2024-07-30-21-29-30.gh-issue-122334.LeoE1x.rst new file mode 100644 index 00000000000000..cef801c950faa6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-30-21-29-30.gh-issue-122334.LeoE1x.rst @@ -0,0 +1 @@ +Fix crash when importing :mod:`ssl` after the main interpreter restarts. diff --git a/Python/getargs.c b/Python/getargs.c index b96ce3a22dae7c..ec2eeb15c832c3 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -2030,6 +2030,19 @@ parser_clear(struct _PyArg_Parser *parser) if (parser->is_kwtuple_owned) { Py_CLEAR(parser->kwtuple); } + + if (parser->format) { + parser->fname = NULL; + } + else { + assert(parser->fname != NULL); + } + parser->custom_msg = NULL; + parser->pos = 0; + parser->min = 0; + parser->max = 0; + parser->is_kwtuple_owned = 0; + parser->once.v = 0; } static PyObject* From 498376d7a7d6f704f22a2c963130cc15c17e7a6f Mon Sep 17 00:00:00 2001 From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:40:42 +0100 Subject: [PATCH 56/70] gh-122445: populate only modified fields in __static_attributes__ (#122446) --- Doc/reference/datamodel.rst | 2 +- Doc/whatsnew/3.13.rst | 2 +- Lib/test/test_compile.py | 5 ++++- ...4-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst | 1 + Python/compile.c | 18 +++++++++++------- 5 files changed, 18 insertions(+), 10 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 2576f9a07284eb..aa61fbdd3131f6 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -999,7 +999,7 @@ Special attributes: a :ref:`generic class `. :attr:`~class.__static_attributes__` - A tuple containing names of attributes of this class which are accessed + A tuple containing names of attributes of this class which are assigned through ``self.X`` from any function in its body. :attr:`__firstlineno__` diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 5761712a3c714b..5c57b5d7ebe2ff 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -247,7 +247,7 @@ Improved Error Messages TypeError: split() got an unexpected keyword argument 'max_split'. Did you mean 'maxsplit'? * Classes have a new :attr:`~class.__static_attributes__` attribute, populated by the compiler, - with a tuple of names of attributes of this class which are accessed + with a tuple of names of attributes of this class which are assigned through ``self.X`` from any function in its body. (Contributed by Irit Katriel in :gh:`115775`.) 
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index 9def47e101b496..4ebc605a3980f0 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -2089,12 +2089,15 @@ def f(): self.assertEqual(end_col, 20) -class TestExpectedAttributes(unittest.TestCase): +class TestStaticAttributes(unittest.TestCase): def test_basic(self): class C: def f(self): self.a = self.b = 42 + # read fields are not included + self.f() + self.arr[3] self.assertIsInstance(C.__static_attributes__, tuple) self.assertEqual(sorted(C.__static_attributes__), ['a', 'b']) diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst new file mode 100644 index 00000000000000..f5aa07c6513ea9 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst @@ -0,0 +1 @@ +Add only fields which are modified via self.* to :attr:`~class.__static_attributes__`. diff --git a/Python/compile.c b/Python/compile.c index 02b5345cedd0a3..87b2c2705474a4 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -563,8 +563,16 @@ compiler_unit_free(struct compiler_unit *u) } static int -compiler_add_static_attribute_to_class(struct compiler *c, PyObject *attr) +compiler_maybe_add_static_attribute_to_class(struct compiler *c, expr_ty e) { + assert(e->kind == Attribute_kind); + expr_ty attr_value = e->v.Attribute.value; + if (attr_value->kind != Name_kind || + e->v.Attribute.ctx != Store || + !_PyUnicode_EqualToASCIIString(attr_value->v.Name.id, "self")) + { + return SUCCESS; + } Py_ssize_t stack_size = PyList_GET_SIZE(c->c_stack); for (Py_ssize_t i = stack_size - 1; i >= 0; i--) { PyObject *capsule = PyList_GET_ITEM(c->c_stack, i); @@ -573,7 +581,7 @@ compiler_add_static_attribute_to_class(struct compiler *c, PyObject *attr) assert(u); if (u->u_scope_type == COMPILER_SCOPE_CLASS) { assert(u->u_static_attributes); - RETURN_IF_ERROR(PySet_Add(u->u_static_attributes, attr)); + RETURN_IF_ERROR(PySet_Add(u->u_static_attributes, e->v.Attribute.attr)); break; } } @@ -6065,11 +6073,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e) ADDOP(c, loc, NOP); return SUCCESS; } - if (e->v.Attribute.value->kind == Name_kind && - _PyUnicode_EqualToASCIIString(e->v.Attribute.value->v.Name.id, "self")) - { - RETURN_IF_ERROR(compiler_add_static_attribute_to_class(c, e->v.Attribute.attr)); - } + RETURN_IF_ERROR(compiler_maybe_add_static_attribute_to_class(c, e)); VISIT(c, expr, e->v.Attribute.value); loc = LOC(e); loc = update_start_location_to_match_attr(c, loc, e); From 7aca84e557d0a6d242f322c493d53947a56bde91 Mon Sep 17 00:00:00 2001 From: Mark Shannon Date: Fri, 2 Aug 2024 16:31:17 +0100 Subject: [PATCH 57/70] GH-117224: Move the body of a few large-ish micro-ops into helper functions (GH-122601) --- Include/internal/pycore_ceval.h | 4 + Python/bytecodes.c | 125 ++--------------------------- Python/ceval.c | 135 ++++++++++++++++++++++++++++++++ Python/executor_cases.c.h | 117 ++------------------------- Python/generated_cases.c.h | 117 ++------------------------- 5 files changed, 163 insertions(+), 335 deletions(-) diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 4fdee9fdf2a1ff..e4af731be0e87f 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -270,6 +270,10 @@ PyAPI_FUNC(PyObject **) _PyObjectArray_FromStackRefArray(_PyStackRef *input, Py_ PyAPI_FUNC(void) _PyObjectArray_Free(PyObject **array, PyObject 
**scratch); +PyAPI_FUNC(PyObject *) _PyEval_GetANext(PyObject *aiter); +PyAPI_FUNC(PyObject *) _PyEval_LoadGlobal(PyObject *globals, PyObject *builtins, PyObject *name); +PyAPI_FUNC(PyObject *) _PyEval_GetAwaitable(PyObject *iterable, int oparg); +PyAPI_FUNC(PyObject *) _PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *name); /* Bits that can be set in PyThreadState.eval_breaker */ #define _PY_GIL_DROP_REQUEST_BIT (1U << 0) diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 414725549d1c20..48b74f93b92ce8 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1010,77 +1010,16 @@ dummy_func( } inst(GET_ANEXT, (aiter -- aiter, awaitable)) { - unaryfunc getter = NULL; - PyObject *next_iter = NULL; - PyObject *awaitable_o; - PyObject *aiter_o = PyStackRef_AsPyObjectBorrow(aiter); - PyTypeObject *type = Py_TYPE(aiter_o); - - if (PyAsyncGen_CheckExact(aiter_o)) { - awaitable_o = type->tp_as_async->am_anext(aiter_o); - if (awaitable_o == NULL) { - ERROR_NO_POP(); - } - } else { - if (type->tp_as_async != NULL){ - getter = type->tp_as_async->am_anext; - } - - if (getter != NULL) { - next_iter = (*getter)(aiter_o); - if (next_iter == NULL) { - ERROR_NO_POP(); - } - } - else { - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' requires an iterator with " - "__anext__ method, got %.100s", - type->tp_name); - ERROR_NO_POP(); - } - - awaitable_o = _PyCoro_GetAwaitableIter(next_iter); - if (awaitable_o == NULL) { - _PyErr_FormatFromCause( - PyExc_TypeError, - "'async for' received an invalid object " - "from __anext__: %.100s", - Py_TYPE(next_iter)->tp_name); - - Py_DECREF(next_iter); - ERROR_NO_POP(); - } else { - Py_DECREF(next_iter); - } + PyObject *awaitable_o = _PyEval_GetANext(PyStackRef_AsPyObjectBorrow(aiter)); + if (awaitable_o == NULL) { + ERROR_NO_POP(); } awaitable = PyStackRef_FromPyObjectSteal(awaitable_o); } inst(GET_AWAITABLE, (iterable -- iter)) { - PyObject *iter_o = _PyCoro_GetAwaitableIter(PyStackRef_AsPyObjectBorrow(iterable)); - - if (iter_o == NULL) { - _PyEval_FormatAwaitableError(tstate, - Py_TYPE(PyStackRef_AsPyObjectBorrow(iterable)), oparg); - } - + PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg); DECREF_INPUTS(); - - if (iter_o != NULL && PyCoro_CheckExact(iter_o)) { - PyObject *yf = _PyGen_yf((PyGenObject*)iter_o); - if (yf != NULL) { - /* `iter` is a coroutine object that is being - awaited, `yf` is a pointer to the current awaitable - being awaited on. */ - Py_DECREF(yf); - Py_CLEAR(iter_o); - _PyErr_SetString(tstate, PyExc_RuntimeError, - "coroutine is being awaited already"); - /* The code below jumps to `error` if `iter` is NULL. 
*/ - } - } - ERROR_IF(iter_o == NULL, error); iter = PyStackRef_FromPyObjectSteal(iter_o); } @@ -1527,27 +1466,9 @@ dummy_func( } inst(LOAD_NAME, (-- v)) { - PyObject *v_o; - PyObject *mod_or_class_dict = LOCALS(); - if (mod_or_class_dict == NULL) { - _PyErr_SetString(tstate, PyExc_SystemError, - "no locals found"); - ERROR_IF(true, error); - } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - ERROR_IF(PyMapping_GetOptionalItem(mod_or_class_dict, name, &v_o) < 0, error); - if (v_o == NULL) { - ERROR_IF(PyDict_GetItemRef(GLOBALS(), name, &v_o) < 0, error); - if (v_o == NULL) { - ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &v_o) < 0, error); - if (v_o == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - ERROR_IF(true, error); - } - } - } + PyObject *v_o = _PyEval_LoadName(tstate, frame, name); + ERROR_IF(v_o == NULL, error); v = PyStackRef_FromPyObjectSteal(v_o); } @@ -1571,38 +1492,8 @@ dummy_func( op(_LOAD_GLOBAL, ( -- res, null if (oparg & 1))) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); - PyObject *res_o; - if (PyDict_CheckExact(GLOBALS()) - && PyDict_CheckExact(BUILTINS())) - { - res_o = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), - (PyDictObject *)BUILTINS(), - name); - if (res_o == NULL) { - if (!_PyErr_Occurred(tstate)) { - /* _PyDict_LoadGlobal() returns NULL without raising - * an exception if the key doesn't exist */ - _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - ERROR_IF(true, error); - } - } - else { - /* Slow-path if globals or builtins is not a dict */ - /* namespace 1: globals */ - ERROR_IF(PyMapping_GetOptionalItem(GLOBALS(), name, &res_o) < 0, error); - if (res_o == NULL) { - /* namespace 2: builtins */ - ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &res_o) < 0, error); - if (res_o == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - ERROR_IF(true, error); - } - } - } + PyObject *res_o = _PyEval_LoadGlobal(GLOBALS(), BUILTINS(), name); + ERROR_IF(res_o == NULL, error); null = PyStackRef_NULL; res = PyStackRef_FromPyObjectSteal(res_o); } diff --git a/Python/ceval.c b/Python/ceval.c index 425a2a01bea8ed..f1663ee539aeac 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -720,6 +720,7 @@ _PyObjectArray_Free(PyObject **array, PyObject **scratch) } } + /* _PyEval_EvalFrameDefault() is a *big* function, * so consume 3 units of C stack */ #define PY_EVAL_C_STACK_UNITS 2 @@ -3031,3 +3032,137 @@ void Py_LeaveRecursiveCall(void) { _Py_LeaveRecursiveCall(); } + +PyObject * +_PyEval_GetANext(PyObject *aiter) +{ + unaryfunc getter = NULL; + PyObject *next_iter = NULL; + PyTypeObject *type = Py_TYPE(aiter); + if (PyAsyncGen_CheckExact(aiter)) { + return type->tp_as_async->am_anext(aiter); + } + if (type->tp_as_async != NULL){ + getter = type->tp_as_async->am_anext; + } + + if (getter != NULL) { + next_iter = (*getter)(aiter); + if (next_iter == NULL) { + return NULL; + } + } + else { + PyErr_Format(PyExc_TypeError, + "'async for' requires an iterator with " + "__anext__ method, got %.100s", + type->tp_name); + return NULL; + } + + PyObject *awaitable = _PyCoro_GetAwaitableIter(next_iter); + if (awaitable == NULL) { + _PyErr_FormatFromCause( + PyExc_TypeError, + "'async for' received an invalid object " + "from __anext__: %.100s", + Py_TYPE(next_iter)->tp_name); + } + Py_DECREF(next_iter); + return awaitable; +} + +PyObject * +_PyEval_LoadGlobal(PyObject *globals, PyObject *builtins, PyObject *name) +{ + PyObject *res; + if 
(PyDict_CheckExact(globals) && PyDict_CheckExact(builtins)) { + res = _PyDict_LoadGlobal((PyDictObject *)globals, + (PyDictObject *)builtins, + name); + if (res == NULL && !PyErr_Occurred()) { + /* _PyDict_LoadGlobal() returns NULL without raising + * an exception if the key doesn't exist */ + _PyEval_FormatExcCheckArg(PyThreadState_GET(), PyExc_NameError, + NAME_ERROR_MSG, name); + } + } + else { + /* Slow-path if globals or builtins is not a dict */ + /* namespace 1: globals */ + if (PyMapping_GetOptionalItem(globals, name, &res) < 0) { + return NULL; + } + if (res == NULL) { + /* namespace 2: builtins */ + if (PyMapping_GetOptionalItem(builtins, name, &res) < 0) { + return NULL; + } + if (res == NULL) { + _PyEval_FormatExcCheckArg( + PyThreadState_GET(), PyExc_NameError, + NAME_ERROR_MSG, name); + } + } + } + return res; +} + +PyObject * +_PyEval_GetAwaitable(PyObject *iterable, int oparg) +{ + PyObject *iter = _PyCoro_GetAwaitableIter(iterable); + + if (iter == NULL) { + _PyEval_FormatAwaitableError(PyThreadState_GET(), + Py_TYPE(iterable), oparg); + } + else if (PyCoro_CheckExact(iter)) { + PyObject *yf = _PyGen_yf((PyGenObject*)iter); + if (yf != NULL) { + /* `iter` is a coroutine object that is being + awaited, `yf` is a pointer to the current awaitable + being awaited on. */ + Py_DECREF(yf); + Py_CLEAR(iter); + _PyErr_SetString(PyThreadState_GET(), PyExc_RuntimeError, + "coroutine is being awaited already"); + } + } + return iter; +} + +PyObject * +_PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *name) +{ + + PyObject *value; + if (frame->f_locals == NULL) { + _PyErr_SetString(tstate, PyExc_SystemError, + "no locals found"); + return NULL; + } + if (PyMapping_GetOptionalItem(frame->f_locals, name, &value) < 0) { + return NULL; + } + if (value != NULL) { + return value; + } + if (PyDict_GetItemRef(frame->f_globals, name, &value) < 0) { + return NULL; + } + if (value != NULL) { + return value; + } + if (PyMapping_GetOptionalItem(frame->f_builtins, name, &value) < 0) { + return NULL; + } + if (value == NULL) { + _PyEval_FormatExcCheckArg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + } + return value; +} + + diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 61e1c5cf5c289d..7f89196192504b 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -1243,45 +1243,9 @@ _PyStackRef aiter; _PyStackRef awaitable; aiter = stack_pointer[-1]; - unaryfunc getter = NULL; - PyObject *next_iter = NULL; - PyObject *awaitable_o; - PyObject *aiter_o = PyStackRef_AsPyObjectBorrow(aiter); - PyTypeObject *type = Py_TYPE(aiter_o); - if (PyAsyncGen_CheckExact(aiter_o)) { - awaitable_o = type->tp_as_async->am_anext(aiter_o); - if (awaitable_o == NULL) { - JUMP_TO_ERROR(); - } - } else { - if (type->tp_as_async != NULL){ - getter = type->tp_as_async->am_anext; - } - if (getter != NULL) { - next_iter = (*getter)(aiter_o); - if (next_iter == NULL) { - JUMP_TO_ERROR(); - } - } - else { - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' requires an iterator with " - "__anext__ method, got %.100s", - type->tp_name); - JUMP_TO_ERROR(); - } - awaitable_o = _PyCoro_GetAwaitableIter(next_iter); - if (awaitable_o == NULL) { - _PyErr_FormatFromCause( - PyExc_TypeError, - "'async for' received an invalid object " - "from __anext__: %.100s", - Py_TYPE(next_iter)->tp_name); - Py_DECREF(next_iter); - JUMP_TO_ERROR(); - } else { - Py_DECREF(next_iter); - } + PyObject *awaitable_o = _PyEval_GetANext(PyStackRef_AsPyObjectBorrow(aiter)); + if 
(awaitable_o == NULL) { + JUMP_TO_ERROR(); } awaitable = PyStackRef_FromPyObjectSteal(awaitable_o); stack_pointer[0] = awaitable; @@ -1295,25 +1259,8 @@ _PyStackRef iter; oparg = CURRENT_OPARG(); iterable = stack_pointer[-1]; - PyObject *iter_o = _PyCoro_GetAwaitableIter(PyStackRef_AsPyObjectBorrow(iterable)); - if (iter_o == NULL) { - _PyEval_FormatAwaitableError(tstate, - Py_TYPE(PyStackRef_AsPyObjectBorrow(iterable)), oparg); - } + PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg); PyStackRef_CLOSE(iterable); - if (iter_o != NULL && PyCoro_CheckExact(iter_o)) { - PyObject *yf = _PyGen_yf((PyGenObject*)iter_o); - if (yf != NULL) { - /* `iter` is a coroutine object that is being - awaited, `yf` is a pointer to the current awaitable - being awaited on. */ - Py_DECREF(yf); - Py_CLEAR(iter_o); - _PyErr_SetString(tstate, PyExc_RuntimeError, - "coroutine is being awaited already"); - /* The code below jumps to `error` if `iter` is NULL. */ - } - } if (iter_o == NULL) JUMP_TO_ERROR(); iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; @@ -1676,27 +1623,9 @@ case _LOAD_NAME: { _PyStackRef v; oparg = CURRENT_OPARG(); - PyObject *v_o; - PyObject *mod_or_class_dict = LOCALS(); - if (mod_or_class_dict == NULL) { - _PyErr_SetString(tstate, PyExc_SystemError, - "no locals found"); - if (true) JUMP_TO_ERROR(); - } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v_o) < 0) JUMP_TO_ERROR(); - if (v_o == NULL) { - if (PyDict_GetItemRef(GLOBALS(), name, &v_o) < 0) JUMP_TO_ERROR(); - if (v_o == NULL) { - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v_o) < 0) JUMP_TO_ERROR(); - if (v_o == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - if (true) JUMP_TO_ERROR(); - } - } - } + PyObject *v_o = _PyEval_LoadName(tstate, frame, name); + if (v_o == NULL) JUMP_TO_ERROR(); v = PyStackRef_FromPyObjectSteal(v_o); stack_pointer[0] = v; stack_pointer += 1; @@ -1709,38 +1638,8 @@ _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); - PyObject *res_o; - if (PyDict_CheckExact(GLOBALS()) - && PyDict_CheckExact(BUILTINS())) - { - res_o = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), - (PyDictObject *)BUILTINS(), - name); - if (res_o == NULL) { - if (!_PyErr_Occurred(tstate)) { - /* _PyDict_LoadGlobal() returns NULL without raising - * an exception if the key doesn't exist */ - _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - if (true) JUMP_TO_ERROR(); - } - } - else { - /* Slow-path if globals or builtins is not a dict */ - /* namespace 1: globals */ - if (PyMapping_GetOptionalItem(GLOBALS(), name, &res_o) < 0) JUMP_TO_ERROR(); - if (res_o == NULL) { - /* namespace 2: builtins */ - if (PyMapping_GetOptionalItem(BUILTINS(), name, &res_o) < 0) JUMP_TO_ERROR(); - if (res_o == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - if (true) JUMP_TO_ERROR(); - } - } - } + PyObject *res_o = _PyEval_LoadGlobal(GLOBALS(), BUILTINS(), name); + if (res_o == NULL) JUMP_TO_ERROR(); null = PyStackRef_NULL; res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 4efaf899f23d1a..bed194e34d5376 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -3468,45 +3468,9 @@ _PyStackRef aiter; _PyStackRef awaitable; aiter = stack_pointer[-1]; - unaryfunc 
getter = NULL; - PyObject *next_iter = NULL; - PyObject *awaitable_o; - PyObject *aiter_o = PyStackRef_AsPyObjectBorrow(aiter); - PyTypeObject *type = Py_TYPE(aiter_o); - if (PyAsyncGen_CheckExact(aiter_o)) { - awaitable_o = type->tp_as_async->am_anext(aiter_o); - if (awaitable_o == NULL) { - goto error; - } - } else { - if (type->tp_as_async != NULL){ - getter = type->tp_as_async->am_anext; - } - if (getter != NULL) { - next_iter = (*getter)(aiter_o); - if (next_iter == NULL) { - goto error; - } - } - else { - _PyErr_Format(tstate, PyExc_TypeError, - "'async for' requires an iterator with " - "__anext__ method, got %.100s", - type->tp_name); - goto error; - } - awaitable_o = _PyCoro_GetAwaitableIter(next_iter); - if (awaitable_o == NULL) { - _PyErr_FormatFromCause( - PyExc_TypeError, - "'async for' received an invalid object " - "from __anext__: %.100s", - Py_TYPE(next_iter)->tp_name); - Py_DECREF(next_iter); - goto error; - } else { - Py_DECREF(next_iter); - } + PyObject *awaitable_o = _PyEval_GetANext(PyStackRef_AsPyObjectBorrow(aiter)); + if (awaitable_o == NULL) { + goto error; } awaitable = PyStackRef_FromPyObjectSteal(awaitable_o); stack_pointer[0] = awaitable; @@ -3522,25 +3486,8 @@ _PyStackRef iterable; _PyStackRef iter; iterable = stack_pointer[-1]; - PyObject *iter_o = _PyCoro_GetAwaitableIter(PyStackRef_AsPyObjectBorrow(iterable)); - if (iter_o == NULL) { - _PyEval_FormatAwaitableError(tstate, - Py_TYPE(PyStackRef_AsPyObjectBorrow(iterable)), oparg); - } + PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg); PyStackRef_CLOSE(iterable); - if (iter_o != NULL && PyCoro_CheckExact(iter_o)) { - PyObject *yf = _PyGen_yf((PyGenObject*)iter_o); - if (yf != NULL) { - /* `iter` is a coroutine object that is being - awaited, `yf` is a pointer to the current awaitable - being awaited on. */ - Py_DECREF(yf); - Py_CLEAR(iter_o); - _PyErr_SetString(tstate, PyExc_RuntimeError, - "coroutine is being awaited already"); - /* The code below jumps to `error` if `iter` is NULL. 
*/ - } - } if (iter_o == NULL) goto pop_1_error; iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; @@ -5231,38 +5178,8 @@ // _LOAD_GLOBAL { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); - PyObject *res_o; - if (PyDict_CheckExact(GLOBALS()) - && PyDict_CheckExact(BUILTINS())) - { - res_o = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), - (PyDictObject *)BUILTINS(), - name); - if (res_o == NULL) { - if (!_PyErr_Occurred(tstate)) { - /* _PyDict_LoadGlobal() returns NULL without raising - * an exception if the key doesn't exist */ - _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - } - if (true) goto error; - } - } - else { - /* Slow-path if globals or builtins is not a dict */ - /* namespace 1: globals */ - if (PyMapping_GetOptionalItem(GLOBALS(), name, &res_o) < 0) goto error; - if (res_o == NULL) { - /* namespace 2: builtins */ - if (PyMapping_GetOptionalItem(BUILTINS(), name, &res_o) < 0) goto error; - if (res_o == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - if (true) goto error; - } - } - } + PyObject *res_o = _PyEval_LoadGlobal(GLOBALS(), BUILTINS(), name); + if (res_o == NULL) goto error; null = PyStackRef_NULL; res = PyStackRef_FromPyObjectSteal(res_o); } @@ -5375,27 +5292,9 @@ next_instr += 1; INSTRUCTION_STATS(LOAD_NAME); _PyStackRef v; - PyObject *v_o; - PyObject *mod_or_class_dict = LOCALS(); - if (mod_or_class_dict == NULL) { - _PyErr_SetString(tstate, PyExc_SystemError, - "no locals found"); - if (true) goto error; - } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v_o) < 0) goto error; - if (v_o == NULL) { - if (PyDict_GetItemRef(GLOBALS(), name, &v_o) < 0) goto error; - if (v_o == NULL) { - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v_o) < 0) goto error; - if (v_o == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - if (true) goto error; - } - } - } + PyObject *v_o = _PyEval_LoadName(tstate, frame, name); + if (v_o == NULL) goto error; v = PyStackRef_FromPyObjectSteal(v_o); stack_pointer[0] = v; stack_pointer += 1; From 4b63cd170e5dd840bffc80922f09f2d69932ff5c Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Fri, 2 Aug 2024 12:11:44 -0400 Subject: [PATCH 58/70] gh-122527: Fix a crash on deallocation of `PyStructSequence` (GH-122577) The `PyStructSequence` destructor would crash if it was deallocated after its type's dictionary was cleared by the GC, because it couldn't compute the "real size" of the instance. This could occur with relatively straightforward code in the free-threaded build or with a reference cycle involving the type in the default build, due to differing orders in which `tp_clear()` was called. Account for the non-sequence fields in `tp_basicsize` and use that, along with `Py_SIZE()`, to compute the "real" size of a `PyStructSequence` in the dealloc function. This avoids the accesses to the type's dictionary during dealloc, which were unsafe. 
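(Illustrative sketch of the scenario described above; it mirrors the regression
test added below. The attribute name is arbitrary -- the point is only that the
struct sequence instance and its type end up in the same reference cycle.)

    import time

    t = time.gmtime()        # a PyStructSequence instance
    type(t).cycle_ref = t    # cycle: instance -> type -> instance
    # At interpreter shutdown the GC may clear the type's dictionary before
    # the instance is deallocated, which previously crashed the destructor.
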
--- Lib/test/test_structseq.py | 13 +++++++++ Lib/test/test_sys.py | 3 +- ...-08-01-19-13-58.gh-issue-122527.eztso6.rst | 4 +++ Objects/structseq.c | 28 +++++++++++++++---- 4 files changed, 41 insertions(+), 7 deletions(-) create mode 100644 Misc/NEWS.d/next/Core_and_Builtins/2024-08-01-19-13-58.gh-issue-122527.eztso6.rst diff --git a/Lib/test/test_structseq.py b/Lib/test/test_structseq.py index 6aec63e2603412..d0bc0bd7b61520 100644 --- a/Lib/test/test_structseq.py +++ b/Lib/test/test_structseq.py @@ -2,8 +2,10 @@ import os import pickle import re +import textwrap import time import unittest +from test.support import script_helper class StructSeqTest(unittest.TestCase): @@ -342,6 +344,17 @@ def test_copy_replace_with_unnamed_fields(self): with self.assertRaisesRegex(TypeError, error_message): copy.replace(r, st_mode=1, error=2) + def test_reference_cycle(self): + # gh-122527: Check that a structseq that's part of a reference cycle + # with its own type doesn't crash. Previously, if the type's dictionary + # was cleared first, the structseq instance would crash in the + # destructor. + script_helper.assert_python_ok("-c", textwrap.dedent(r""" + import time + t = time.gmtime() + type(t).refcyle = t + """)) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 7e4bc980b390f7..709355e293f2fc 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1823,7 +1823,8 @@ def test_pythontypes(self): # symtable entry # XXX # sys.flags - check(sys.flags, vsize('') + self.P * len(sys.flags)) + # FIXME: The +1 will not be necessary once gh-122575 is fixed + check(sys.flags, vsize('') + self.P * (1 + len(sys.flags))) def test_asyncgen_hooks(self): old = sys.get_asyncgen_hooks() diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-08-01-19-13-58.gh-issue-122527.eztso6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-08-01-19-13-58.gh-issue-122527.eztso6.rst new file mode 100644 index 00000000000000..f697ed99d0c523 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-08-01-19-13-58.gh-issue-122527.eztso6.rst @@ -0,0 +1,4 @@ +Fix a crash that occurred when a ``PyStructSequence`` was deallocated after +its type's dictionary was cleared by the GC. The type's +:c:member:`~PyTypeObject.tp_basicsize` now accounts for non-sequence fields +that aren't included in the :c:macro:`Py_SIZE` of the sequence. diff --git a/Objects/structseq.c b/Objects/structseq.c index d8289f2638db0f..94f09b3ee0a337 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -41,12 +41,20 @@ get_type_attr_as_size(PyTypeObject *tp, PyObject *name) get_type_attr_as_size(tp, &_Py_ID(n_sequence_fields)) #define REAL_SIZE_TP(tp) \ get_type_attr_as_size(tp, &_Py_ID(n_fields)) -#define REAL_SIZE(op) REAL_SIZE_TP(Py_TYPE(op)) +#define REAL_SIZE(op) get_real_size((PyObject *)op) #define UNNAMED_FIELDS_TP(tp) \ get_type_attr_as_size(tp, &_Py_ID(n_unnamed_fields)) #define UNNAMED_FIELDS(op) UNNAMED_FIELDS_TP(Py_TYPE(op)) +static Py_ssize_t +get_real_size(PyObject *op) +{ + // Compute the real size from the visible size (i.e., Py_SIZE()) and the + // number of non-sequence fields accounted for in tp_basicsize. 
+ Py_ssize_t hidden = Py_TYPE(op)->tp_basicsize - offsetof(PyStructSequence, ob_item); + return Py_SIZE(op) + hidden / sizeof(PyObject *); +} PyObject * PyStructSequence_New(PyTypeObject *type) @@ -120,6 +128,9 @@ structseq_dealloc(PyStructSequence *obj) PyObject_GC_UnTrack(obj); PyTypeObject *tp = Py_TYPE(obj); + // gh-122527: We can't use REAL_SIZE_TP() or any macros that access the + // type's dictionary here, because the dictionary may have already been + // cleared by the garbage collector. size = REAL_SIZE(obj); for (i = 0; i < size; ++i) { Py_XDECREF(obj->ob_item[i]); @@ -565,10 +576,14 @@ initialize_members(PyStructSequence_Desc *desc, static void initialize_static_fields(PyTypeObject *type, PyStructSequence_Desc *desc, - PyMemberDef *tp_members, unsigned long tp_flags) + PyMemberDef *tp_members, Py_ssize_t n_members, + unsigned long tp_flags) { type->tp_name = desc->name; - type->tp_basicsize = sizeof(PyStructSequence) - sizeof(PyObject *); + // Account for hidden members in tp_basicsize because they are not + // included in the variable size. + Py_ssize_t n_hidden = n_members - desc->n_in_sequence; + type->tp_basicsize = sizeof(PyStructSequence) + (n_hidden - 1) * sizeof(PyObject *); type->tp_itemsize = sizeof(PyObject *); type->tp_dealloc = (destructor)structseq_dealloc; type->tp_repr = (reprfunc)structseq_repr; @@ -621,7 +636,7 @@ _PyStructSequence_InitBuiltinWithFlags(PyInterpreterState *interp, if (members == NULL) { goto error; } - initialize_static_fields(type, desc, members, tp_flags); + initialize_static_fields(type, desc, members, n_members, tp_flags); _Py_SetImmortal((PyObject *)type); } @@ -684,7 +699,7 @@ PyStructSequence_InitType2(PyTypeObject *type, PyStructSequence_Desc *desc) if (members == NULL) { return -1; } - initialize_static_fields(type, desc, members, 0); + initialize_static_fields(type, desc, members, n_members, 0); if (initialize_static_type(type, desc, n_members, n_unnamed_members) < 0) { PyMem_Free(members); return -1; @@ -760,7 +775,8 @@ _PyStructSequence_NewType(PyStructSequence_Desc *desc, unsigned long tp_flags) /* The name in this PyType_Spec is statically allocated so it is */ /* expected that it'll outlive the PyType_Spec */ spec.name = desc->name; - spec.basicsize = sizeof(PyStructSequence) - sizeof(PyObject *); + Py_ssize_t hidden = n_members - desc->n_in_sequence; + spec.basicsize = (int)(sizeof(PyStructSequence) + (hidden - 1) * sizeof(PyObject *)); spec.itemsize = sizeof(PyObject *); spec.flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | tp_flags; spec.slots = slots; From fe0a28d850943cf2ba132c9b0a933bb0c98ff0ae Mon Sep 17 00:00:00 2001 From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com> Date: Fri, 2 Aug 2024 23:56:51 +0100 Subject: [PATCH 59/70] gh-122560: add test that comprehension loop var appears only in one scope of the symtable (#122582) --- Lib/test/test_symtable.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/Lib/test/test_symtable.py b/Lib/test/test_symtable.py index bd367c1591c744..24d89b09d946ad 100644 --- a/Lib/test/test_symtable.py +++ b/Lib/test/test_symtable.py @@ -528,6 +528,27 @@ def test_symtable_entry_repr(self): self.assertEqual(repr(self.top._table), expected) +class ComprehensionTests(unittest.TestCase): + def get_identifiers_recursive(self, st, res): + res.extend(st.get_identifiers()) + for ch in st.get_children(): + self.get_identifiers_recursive(ch, res) + + def test_loopvar_in_only_one_scope(self): + # ensure that the loop variable appears only once in the symtable 
+ comps = [ + "[x for x in [1]]", + "{x for x in [1]}", + "{x:x*x for x in [1]}", + ] + for comp in comps: + with self.subTest(comp=comp): + st = symtable.symtable(comp, "?", "exec") + ids = [] + self.get_identifiers_recursive(st, ids) + self.assertEqual(len([x for x in ids if x == 'x']), 1) + + class CommandLineTest(unittest.TestCase): maxDiff = None From efcd65cd84d5ebcc6cacb67971f235a726a205e7 Mon Sep 17 00:00:00 2001 From: Serhiy Storchaka Date: Sat, 3 Aug 2024 12:45:45 +0300 Subject: [PATCH 60/70] gh-122313: Clean up deep recursion guarding code in the compiler (GH-122640) Add ENTER_RECURSIVE and LEAVE_RECURSIVE macros in ast.c, ast_opt.c and symtable.c. Remove VISIT_QUIT macro in symtable.c. The current recursion depth counter only needs to be updated during normal execution -- all functions should just return an error code if an error occurs. --- Python/ast.c | 46 +++++---- Python/ast_opt.c | 39 ++++---- Python/symtable.c | 241 +++++++++++++++++++++++----------------------- 3 files changed, 164 insertions(+), 162 deletions(-) diff --git a/Python/ast.c b/Python/ast.c index 1d1a48ec885686..bf1ff5f3ec18ba 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -14,6 +14,20 @@ struct validator { int recursion_limit; /* recursion limit */ }; +#define ENTER_RECURSIVE(ST) \ + do { \ + if (++(ST)->recursion_depth > (ST)->recursion_limit) { \ + PyErr_SetString(PyExc_RecursionError, \ + "maximum recursion depth exceeded during compilation"); \ + return 0; \ + } \ + } while(0) + +#define LEAVE_RECURSIVE(ST) \ + do { \ + --(ST)->recursion_depth; \ + } while(0) + static int validate_stmts(struct validator *, asdl_stmt_seq *); static int validate_exprs(struct validator *, asdl_expr_seq *, expr_context_ty, int); static int validate_patterns(struct validator *, asdl_pattern_seq *, int); @@ -166,11 +180,7 @@ validate_constant(struct validator *state, PyObject *value) return 1; if (PyTuple_CheckExact(value) || PyFrozenSet_CheckExact(value)) { - if (++state->recursion_depth > state->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - return 0; - } + ENTER_RECURSIVE(state); PyObject *it = PyObject_GetIter(value); if (it == NULL) @@ -195,7 +205,7 @@ validate_constant(struct validator *state, PyObject *value) } Py_DECREF(it); - --state->recursion_depth; + LEAVE_RECURSIVE(state); return 1; } @@ -213,11 +223,7 @@ validate_expr(struct validator *state, expr_ty exp, expr_context_ty ctx) assert(!PyErr_Occurred()); VALIDATE_POSITIONS(exp); int ret = -1; - if (++state->recursion_depth > state->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - return 0; - } + ENTER_RECURSIVE(state); int check_ctx = 1; expr_context_ty actual_ctx; @@ -398,7 +404,7 @@ validate_expr(struct validator *state, expr_ty exp, expr_context_ty ctx) PyErr_SetString(PyExc_SystemError, "unexpected expression"); ret = 0; } - state->recursion_depth--; + LEAVE_RECURSIVE(state); return ret; } @@ -544,11 +550,7 @@ validate_pattern(struct validator *state, pattern_ty p, int star_ok) assert(!PyErr_Occurred()); VALIDATE_POSITIONS(p); int ret = -1; - if (++state->recursion_depth > state->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - return 0; - } + ENTER_RECURSIVE(state); switch (p->kind) { case MatchValue_kind: ret = validate_pattern_match_value(state, p->v.MatchValue.value); @@ -690,7 +692,7 @@ validate_pattern(struct validator *state, pattern_ty p, int 
star_ok) PyErr_SetString(PyExc_SystemError, "unexpected pattern"); ret = 0; } - state->recursion_depth--; + LEAVE_RECURSIVE(state); return ret; } @@ -725,11 +727,7 @@ validate_stmt(struct validator *state, stmt_ty stmt) assert(!PyErr_Occurred()); VALIDATE_POSITIONS(stmt); int ret = -1; - if (++state->recursion_depth > state->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - return 0; - } + ENTER_RECURSIVE(state); switch (stmt->kind) { case FunctionDef_kind: ret = validate_body(state, stmt->v.FunctionDef.body, "FunctionDef") && @@ -946,7 +944,7 @@ validate_stmt(struct validator *state, stmt_ty stmt) PyErr_SetString(PyExc_SystemError, "unexpected statement"); ret = 0; } - state->recursion_depth--; + LEAVE_RECURSIVE(state); return ret; } diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 2e2c78b9d4d7d2..d7a26e64150e55 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -15,6 +15,19 @@ typedef struct { int recursion_limit; /* recursion limit */ } _PyASTOptimizeState; +#define ENTER_RECURSIVE(ST) \ + do { \ + if (++(ST)->recursion_depth > (ST)->recursion_limit) { \ + PyErr_SetString(PyExc_RecursionError, \ + "maximum recursion depth exceeded during compilation"); \ + return 0; \ + } \ + } while(0) + +#define LEAVE_RECURSIVE(ST) \ + do { \ + --(ST)->recursion_depth; \ + } while(0) static int make_const(expr_ty node, PyObject *val, PyArena *arena) @@ -708,11 +721,7 @@ astfold_mod(mod_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) static int astfold_expr(expr_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) { - if (++state->recursion_depth > state->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - return 0; - } + ENTER_RECURSIVE(state); switch (node_->kind) { case BoolOp_kind: CALL_SEQ(astfold_expr, expr, node_->v.BoolOp.values); @@ -811,7 +820,7 @@ astfold_expr(expr_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) case Name_kind: if (node_->v.Name.ctx == Load && _PyUnicode_EqualToASCIIString(node_->v.Name.id, "__debug__")) { - state->recursion_depth--; + LEAVE_RECURSIVE(state); return make_const(node_, PyBool_FromLong(!state->optimize), ctx_); } break; @@ -824,7 +833,7 @@ astfold_expr(expr_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) // No default case, so the compiler will emit a warning if new expression // kinds are added without being handled here } - state->recursion_depth--; + LEAVE_RECURSIVE(state);; return 1; } @@ -871,11 +880,7 @@ astfold_arg(arg_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) static int astfold_stmt(stmt_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) { - if (++state->recursion_depth > state->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - return 0; - } + ENTER_RECURSIVE(state); switch (node_->kind) { case FunctionDef_kind: CALL_SEQ(astfold_type_param, type_param, node_->v.FunctionDef.type_params); @@ -999,7 +1004,7 @@ astfold_stmt(stmt_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) // No default case, so the compiler will emit a warning if new statement // kinds are added without being handled here } - state->recursion_depth--; + LEAVE_RECURSIVE(state); return 1; } @@ -1031,11 +1036,7 @@ astfold_pattern(pattern_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) // Currently, this is really only used to form complex/negative numeric // constants in MatchValue and MatchMapping nodes // We still recurse into all 
subexpressions and subpatterns anyway - if (++state->recursion_depth > state->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - return 0; - } + ENTER_RECURSIVE(state); switch (node_->kind) { case MatchValue_kind: CALL(astfold_expr, expr_ty, node_->v.MatchValue.value); @@ -1067,7 +1068,7 @@ astfold_pattern(pattern_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) // No default case, so the compiler will emit a warning if new pattern // kinds are added without being handled here } - state->recursion_depth--; + LEAVE_RECURSIVE(state); return 1; } diff --git a/Python/symtable.c b/Python/symtable.c index 89a0d8a2ccec1a..ef81a0799de3aa 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -1617,17 +1617,17 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, VISIT_SEQ_TAIL permits the start of an ASDL sequence to be skipped, which is useful if the first node in the sequence requires special treatment. - VISIT_QUIT macro returns the specified value exiting from the function but - first adjusts current recursion counter depth. -*/ + ENTER_RECURSIVE macro increments the current recursion depth counter. + It should be used at the beginning of the recursive function. -#define VISIT_QUIT(ST, X) \ - return --(ST)->recursion_depth,(X) + LEAVE_RECURSIVE macro decrements the current recursion depth counter. + It should be used at the end of the recursive function. +*/ #define VISIT(ST, TYPE, V) \ do { \ if (!symtable_visit_ ## TYPE((ST), (V))) { \ - VISIT_QUIT((ST), 0); \ + return 0; \ } \ } while(0) @@ -1638,7 +1638,7 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, for (i = 0; i < asdl_seq_LEN(seq); i++) { \ TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \ if (!symtable_visit_ ## TYPE((ST), elt)) \ - VISIT_QUIT((ST), 0); \ + return 0; \ } \ } while(0) @@ -1649,7 +1649,7 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, for (i = (START); i < asdl_seq_LEN(seq); i++) { \ TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \ if (!symtable_visit_ ## TYPE((ST), elt)) \ - VISIT_QUIT((ST), 0); \ + return 0; \ } \ } while(0) @@ -1661,10 +1661,25 @@ symtable_enter_type_param_block(struct symtable *st, identifier name, TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \ if (!elt) continue; /* can be NULL */ \ if (!symtable_visit_ ## TYPE((ST), elt)) \ - VISIT_QUIT((ST), 0); \ + return 0; \ } \ } while(0) +#define ENTER_RECURSIVE(ST) \ + do { \ + if (++(ST)->recursion_depth > (ST)->recursion_limit) { \ + PyErr_SetString(PyExc_RecursionError, \ + "maximum recursion depth exceeded during compilation"); \ + return 0; \ + } \ + } while(0) + +#define LEAVE_RECURSIVE(ST) \ + do { \ + --(ST)->recursion_depth; \ + } while(0) + + static int symtable_record_directive(struct symtable *st, identifier name, _Py_SourceLocation loc) { @@ -1737,15 +1752,11 @@ maybe_set_ste_coroutine_for_module(struct symtable *st, stmt_ty s) static int symtable_visit_stmt(struct symtable *st, stmt_ty s) { - if (++st->recursion_depth > st->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - VISIT_QUIT(st, 0); - } + ENTER_RECURSIVE(st); switch (s->kind) { case FunctionDef_kind: { if (!symtable_add_def(st, s->v.FunctionDef.name, DEF_LOCAL, LOCATION(s))) - VISIT_QUIT(st, 0); + return 0; if (s->v.FunctionDef.args->defaults) VISIT_SEQ(st, expr, s->v.FunctionDef.args->defaults); if (s->v.FunctionDef.args->kw_defaults) @@ -1761,40 +1772,40 
@@ symtable_visit_stmt(struct symtable *st, stmt_ty s) s->v.FunctionDef.args->kw_defaults), s->kind, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } VISIT_SEQ(st, type_param, s->v.FunctionDef.type_params); } PySTEntryObject *new_ste = ste_new(st, s->v.FunctionDef.name, FunctionBlock, (void *)s, LOCATION(s)); if (!new_ste) { - VISIT_QUIT(st, 0); + return 0; } if (!symtable_visit_annotations(st, s, s->v.FunctionDef.args, s->v.FunctionDef.returns, new_ste)) { Py_DECREF(new_ste); - VISIT_QUIT(st, 0); + return 0; } if (!symtable_enter_existing_block(st, new_ste)) { Py_DECREF(new_ste); - VISIT_QUIT(st, 0); + return 0; } Py_DECREF(new_ste); VISIT(st, arguments, s->v.FunctionDef.args); VISIT_SEQ(st, stmt, s->v.FunctionDef.body); if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; if (asdl_seq_LEN(s->v.FunctionDef.type_params) > 0) { if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; } break; } case ClassDef_kind: { PyObject *tmp; if (!symtable_add_def(st, s->v.ClassDef.name, DEF_LOCAL, LOCATION(s))) - VISIT_QUIT(st, 0); + return 0; if (s->v.ClassDef.decorator_list) VISIT_SEQ(st, expr, s->v.ClassDef.decorator_list); tmp = st->st_private; @@ -1803,42 +1814,42 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) (void *)s->v.ClassDef.type_params, false, false, s->kind, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } st->st_private = s->v.ClassDef.name; st->st_cur->ste_mangled_names = PySet_New(NULL); if (!st->st_cur->ste_mangled_names) { - VISIT_QUIT(st, 0); + return 0; } VISIT_SEQ(st, type_param, s->v.ClassDef.type_params); } VISIT_SEQ(st, expr, s->v.ClassDef.bases); if (!check_keywords(st, s->v.ClassDef.keywords)) { - VISIT_QUIT(st, 0); + return 0; } VISIT_SEQ(st, keyword, s->v.ClassDef.keywords); if (!symtable_enter_block(st, s->v.ClassDef.name, ClassBlock, (void *)s, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } st->st_private = s->v.ClassDef.name; if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) { if (!symtable_add_def(st, &_Py_ID(__type_params__), DEF_LOCAL, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } _Py_DECLARE_STR(type_params, ".type_params"); if (!symtable_add_def(st, &_Py_STR(type_params), USE, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } } VISIT_SEQ(st, stmt, s->v.ClassDef.body); if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) { if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; } st->st_private = tmp; break; @@ -1855,24 +1866,24 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) (void *)s->v.TypeAlias.type_params, false, false, s->kind, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } VISIT_SEQ(st, type_param, s->v.TypeAlias.type_params); } if (!symtable_enter_block(st, name, TypeAliasBlock, (void *)s, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } st->st_cur->ste_can_see_class_scope = is_in_class; if (is_in_class && !symtable_add_def(st, &_Py_ID(__classdict__), USE, LOCATION(s->v.TypeAlias.value))) { - VISIT_QUIT(st, 0); + return 0; } VISIT(st, expr, s->v.TypeAlias.value); if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; if (is_generic) { if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; } break; } @@ -1895,7 +1906,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) expr_ty e_name = s->v.AnnAssign.target; long cur = symtable_lookup(st, e_name->v.Name.id); if (cur < 0) { - VISIT_QUIT(st, 0); + return 0; } if ((cur & (DEF_GLOBAL | DEF_NONLOCAL)) && (st->st_cur->ste_symbols != st->st_global) @@ -1904,17 +1915,17 @@ 
symtable_visit_stmt(struct symtable *st, stmt_ty s) cur & DEF_GLOBAL ? GLOBAL_ANNOT : NONLOCAL_ANNOT, e_name->v.Name.id); SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); - VISIT_QUIT(st, 0); + return 0; } if (s->v.AnnAssign.simple && !symtable_add_def(st, e_name->v.Name.id, DEF_ANNOT | DEF_LOCAL, LOCATION(e_name))) { - VISIT_QUIT(st, 0); + return 0; } else { if (s->v.AnnAssign.value && !symtable_add_def(st, e_name->v.Name.id, DEF_LOCAL, LOCATION(e_name))) { - VISIT_QUIT(st, 0); + return 0; } } } @@ -1923,7 +1934,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) } if (!symtable_visit_annotation(st, s->v.AnnAssign.annotation, (void *)((uintptr_t)st->st_cur->ste_id + 1))) { - VISIT_QUIT(st, 0); + return 0; } if (s->v.AnnAssign.value) { @@ -1990,7 +2001,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) case ImportFrom_kind: VISIT_SEQ(st, alias, s->v.ImportFrom.names); if (!check_import_from(st, s)) { - VISIT_QUIT(st, 0); + return 0; } break; case Global_kind: { @@ -2000,7 +2011,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) identifier name = (identifier)asdl_seq_GET(seq, i); long cur = symtable_lookup(st, name); if (cur < 0) - VISIT_QUIT(st, 0); + return 0; if (cur & (DEF_PARAM | DEF_LOCAL | USE | DEF_ANNOT)) { const char* msg; if (cur & DEF_PARAM) { @@ -2015,13 +2026,13 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) PyErr_Format(PyExc_SyntaxError, msg, name); SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); - VISIT_QUIT(st, 0); + return 0; } if (!symtable_add_def(st, name, DEF_GLOBAL, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } if (!symtable_record_directive(st, name, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } } break; @@ -2033,7 +2044,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) identifier name = (identifier)asdl_seq_GET(seq, i); long cur = symtable_lookup(st, name); if (cur < 0) - VISIT_QUIT(st, 0); + return 0; if (cur & (DEF_PARAM | DEF_LOCAL | USE | DEF_ANNOT)) { const char* msg; if (cur & DEF_PARAM) { @@ -2047,12 +2058,12 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) } PyErr_Format(PyExc_SyntaxError, msg, name); SET_ERROR_LOCATION(st->st_filename, LOCATION(s)); - VISIT_QUIT(st, 0); + return 0; } if (!symtable_add_def(st, name, DEF_NONLOCAL, LOCATION(s))) - VISIT_QUIT(st, 0); + return 0; if (!symtable_record_directive(st, name, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } } break; @@ -2071,7 +2082,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) break; case AsyncFunctionDef_kind: { if (!symtable_add_def(st, s->v.AsyncFunctionDef.name, DEF_LOCAL, LOCATION(s))) - VISIT_QUIT(st, 0); + return 0; if (s->v.AsyncFunctionDef.args->defaults) VISIT_SEQ(st, expr, s->v.AsyncFunctionDef.args->defaults); if (s->v.AsyncFunctionDef.args->kw_defaults) @@ -2088,24 +2099,24 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) s->v.AsyncFunctionDef.args->kw_defaults), s->kind, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } VISIT_SEQ(st, type_param, s->v.AsyncFunctionDef.type_params); } PySTEntryObject *new_ste = ste_new(st, s->v.FunctionDef.name, FunctionBlock, (void *)s, LOCATION(s)); if (!new_ste) { - VISIT_QUIT(st, 0); + return 0; } if (!symtable_visit_annotations(st, s, s->v.AsyncFunctionDef.args, s->v.AsyncFunctionDef.returns, new_ste)) { Py_DECREF(new_ste); - VISIT_QUIT(st, 0); + return 0; } if (!symtable_enter_existing_block(st, new_ste)) { Py_DECREF(new_ste); - VISIT_QUIT(st, 0); + return 0; } Py_DECREF(new_ste); @@ -2113,17 +2124,17 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) VISIT(st, arguments, 
s->v.AsyncFunctionDef.args); VISIT_SEQ(st, stmt, s->v.AsyncFunctionDef.body); if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; if (asdl_seq_LEN(s->v.AsyncFunctionDef.type_params) > 0) { if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; } break; } case AsyncWith_kind: maybe_set_ste_coroutine_for_module(st, s); if (!symtable_raise_if_not_coroutine(st, ASYNC_WITH_OUTSIDE_ASYNC_FUNC, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } VISIT_SEQ(st, withitem, s->v.AsyncWith.items); VISIT_SEQ(st, stmt, s->v.AsyncWith.body); @@ -2131,7 +2142,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) case AsyncFor_kind: maybe_set_ste_coroutine_for_module(st, s); if (!symtable_raise_if_not_coroutine(st, ASYNC_FOR_OUTSIDE_ASYNC_FUNC, LOCATION(s))) { - VISIT_QUIT(st, 0); + return 0; } VISIT(st, expr, s->v.AsyncFor.target); VISIT(st, expr, s->v.AsyncFor.iter); @@ -2140,7 +2151,8 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s) VISIT_SEQ(st, stmt, s->v.AsyncFor.orelse); break; } - VISIT_QUIT(st, 1); + LEAVE_RECURSIVE(st); + return 1; } static int @@ -2168,7 +2180,7 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) (target_in_scope & DEF_LOCAL)) { PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_CONFLICT, target_name); SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); - VISIT_QUIT(st, 0); + return 0; } continue; } @@ -2178,14 +2190,14 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) long target_in_scope = symtable_lookup_entry(st, ste, target_name); if (target_in_scope & DEF_GLOBAL) { if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) - VISIT_QUIT(st, 0); + return 0; } else { if (!symtable_add_def(st, target_name, DEF_NONLOCAL, LOCATION(e))) { - VISIT_QUIT(st, 0); + return 0; } } if (!symtable_record_directive(st, target_name, LOCATION(e))) { - VISIT_QUIT(st, 0); + return 0; } return symtable_add_def_helper(st, target_name, DEF_LOCAL, ste, LOCATION(e)); @@ -2193,10 +2205,10 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) /* If we find a ModuleBlock entry, add as GLOBAL */ if (ste->ste_type == ModuleBlock) { if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) { - VISIT_QUIT(st, 0); + return 0; } if (!symtable_record_directive(st, target_name, LOCATION(e))) { - VISIT_QUIT(st, 0); + return 0; } return symtable_add_def_helper(st, target_name, DEF_GLOBAL, ste, LOCATION(e)); @@ -2223,7 +2235,7 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e) Py_UNREACHABLE(); } SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); - VISIT_QUIT(st, 0); + return 0; } } @@ -2256,18 +2268,14 @@ symtable_handle_namedexpr(struct symtable *st, expr_ty e) static int symtable_visit_expr(struct symtable *st, expr_ty e) { - if (++st->recursion_depth > st->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - VISIT_QUIT(st, 0); - } + ENTER_RECURSIVE(st); switch (e->kind) { case NamedExpr_kind: if (!symtable_raise_if_annotation_block(st, "named expression", e)) { - VISIT_QUIT(st, 0); + return 0; } if(!symtable_handle_namedexpr(st, e)) - VISIT_QUIT(st, 0); + return 0; break; case BoolOp_kind: VISIT_SEQ(st, expr, e->v.BoolOp.values); @@ -2286,12 +2294,12 @@ symtable_visit_expr(struct symtable *st, expr_ty e) VISIT_SEQ_WITH_NULL(st, expr, e->v.Lambda.args->kw_defaults); if (!symtable_enter_block(st, &_Py_ID(lambda), FunctionBlock, (void *)e, LOCATION(e))) { - VISIT_QUIT(st, 0); + return 0; } VISIT(st, arguments, e->v.Lambda.args); VISIT(st, expr, 
e->v.Lambda.body); if (!symtable_exit_block(st)) - VISIT_QUIT(st, 0); + return 0; break; } case IfExp_kind: @@ -2308,23 +2316,23 @@ symtable_visit_expr(struct symtable *st, expr_ty e) break; case GeneratorExp_kind: if (!symtable_visit_genexp(st, e)) - VISIT_QUIT(st, 0); + return 0; break; case ListComp_kind: if (!symtable_visit_listcomp(st, e)) - VISIT_QUIT(st, 0); + return 0; break; case SetComp_kind: if (!symtable_visit_setcomp(st, e)) - VISIT_QUIT(st, 0); + return 0; break; case DictComp_kind: if (!symtable_visit_dictcomp(st, e)) - VISIT_QUIT(st, 0); + return 0; break; case Yield_kind: if (!symtable_raise_if_annotation_block(st, "yield expression", e)) { - VISIT_QUIT(st, 0); + return 0; } if (e->v.Yield.value) VISIT(st, expr, e->v.Yield.value); @@ -2335,7 +2343,7 @@ symtable_visit_expr(struct symtable *st, expr_ty e) break; case YieldFrom_kind: if (!symtable_raise_if_annotation_block(st, "yield expression", e)) { - VISIT_QUIT(st, 0); + return 0; } VISIT(st, expr, e->v.YieldFrom.value); st->st_cur->ste_generator = 1; @@ -2345,20 +2353,20 @@ symtable_visit_expr(struct symtable *st, expr_ty e) break; case Await_kind: if (!symtable_raise_if_annotation_block(st, "await expression", e)) { - VISIT_QUIT(st, 0); + return 0; } if (!allows_top_level_await(st)) { if (!_PyST_IsFunctionLike(st->st_cur)) { PyErr_SetString(PyExc_SyntaxError, "'await' outside function"); SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); - VISIT_QUIT(st, 0); + return 0; } if (!IS_ASYNC_DEF(st) && st->st_cur->ste_comprehension == NoComprehension) { PyErr_SetString(PyExc_SyntaxError, "'await' outside async function"); SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); - VISIT_QUIT(st, 0); + return 0; } } VISIT(st, expr, e->v.Await.value); @@ -2372,7 +2380,7 @@ symtable_visit_expr(struct symtable *st, expr_ty e) VISIT(st, expr, e->v.Call.func); VISIT_SEQ(st, expr, e->v.Call.args); if (!check_keywords(st, e->v.Call.keywords)) { - VISIT_QUIT(st, 0); + return 0; } VISIT_SEQ_WITH_NULL(st, keyword, e->v.Call.keywords); break; @@ -2390,7 +2398,7 @@ symtable_visit_expr(struct symtable *st, expr_ty e) /* The following exprs can be assignment targets. */ case Attribute_kind: if (!check_name(st, e->v.Attribute.attr, LOCATION(e), e->v.Attribute.ctx)) { - VISIT_QUIT(st, 0); + return 0; } VISIT(st, expr, e->v.Attribute.value); break; @@ -2413,14 +2421,14 @@ symtable_visit_expr(struct symtable *st, expr_ty e) if (!symtable_add_def_ctx(st, e->v.Name.id, e->v.Name.ctx == Load ? 
USE : DEF_LOCAL, LOCATION(e), e->v.Name.ctx)) { - VISIT_QUIT(st, 0); + return 0; } /* Special-case super: it counts as a use of __class__ */ if (e->v.Name.ctx == Load && _PyST_IsFunctionLike(st->st_cur) && _PyUnicode_EqualToASCIIString(e->v.Name.id, "super")) { if (!symtable_add_def(st, &_Py_ID(__class__), USE, LOCATION(e))) - VISIT_QUIT(st, 0); + return 0; } break; /* child nodes of List and Tuple will have expr_context set */ @@ -2431,7 +2439,8 @@ symtable_visit_expr(struct symtable *st, expr_ty e) VISIT_SEQ(st, expr, e->v.Tuple.elts); break; } - VISIT_QUIT(st, 1); + LEAVE_RECURSIVE(st); + return 1; } static int @@ -2447,7 +2456,7 @@ symtable_visit_type_param_bound_or_default( st->st_cur->ste_can_see_class_scope = is_in_class; if (is_in_class && !symtable_add_def(st, &_Py_ID(__classdict__), USE, LOCATION(e))) { - VISIT_QUIT(st, 0); + return 0; } assert(ste_scope_info != NULL); @@ -2464,15 +2473,11 @@ symtable_visit_type_param_bound_or_default( static int symtable_visit_type_param(struct symtable *st, type_param_ty tp) { - if (++st->recursion_depth > st->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - VISIT_QUIT(st, 0); - } + ENTER_RECURSIVE(st); switch(tp->kind) { case TypeVar_kind: if (!symtable_add_def(st, tp->v.TypeVar.name, DEF_TYPE_PARAM | DEF_LOCAL, LOCATION(tp))) - VISIT_QUIT(st, 0); + return 0; const char *ste_scope_info = NULL; const expr_ty bound = tp->v.TypeVar.bound; @@ -2488,46 +2493,43 @@ symtable_visit_type_param(struct symtable *st, type_param_ty tp) // compile.c where the scope is retrieved. if (!symtable_visit_type_param_bound_or_default(st, tp->v.TypeVar.bound, tp->v.TypeVar.name, (void *)tp, ste_scope_info)) { - VISIT_QUIT(st, 0); + return 0; } if (!symtable_visit_type_param_bound_or_default(st, tp->v.TypeVar.default_value, tp->v.TypeVar.name, (void *)((uintptr_t)tp + 1), "a TypeVar default")) { - VISIT_QUIT(st, 0); + return 0; } break; case TypeVarTuple_kind: if (!symtable_add_def(st, tp->v.TypeVarTuple.name, DEF_TYPE_PARAM | DEF_LOCAL, LOCATION(tp))) { - VISIT_QUIT(st, 0); + return 0; } if (!symtable_visit_type_param_bound_or_default(st, tp->v.TypeVarTuple.default_value, tp->v.TypeVarTuple.name, (void *)tp, "a TypeVarTuple default")) { - VISIT_QUIT(st, 0); + return 0; } break; case ParamSpec_kind: if (!symtable_add_def(st, tp->v.ParamSpec.name, DEF_TYPE_PARAM | DEF_LOCAL, LOCATION(tp))) { - VISIT_QUIT(st, 0); + return 0; } if (!symtable_visit_type_param_bound_or_default(st, tp->v.ParamSpec.default_value, tp->v.ParamSpec.name, (void *)tp, "a ParamSpec default")) { - VISIT_QUIT(st, 0); + return 0; } break; } - VISIT_QUIT(st, 1); + LEAVE_RECURSIVE(st); + return 1; } static int symtable_visit_pattern(struct symtable *st, pattern_ty p) { - if (++st->recursion_depth > st->recursion_limit) { - PyErr_SetString(PyExc_RecursionError, - "maximum recursion depth exceeded during compilation"); - VISIT_QUIT(st, 0); - } + ENTER_RECURSIVE(st); switch (p->kind) { case MatchValue_kind: VISIT(st, expr, p->v.MatchValue.value); @@ -2541,7 +2543,7 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p) case MatchStar_kind: if (p->v.MatchStar.name) { if (!symtable_add_def(st, p->v.MatchStar.name, DEF_LOCAL, LOCATION(p))) { - VISIT_QUIT(st, 0); + return 0; } } break; @@ -2550,7 +2552,7 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p) VISIT_SEQ(st, pattern, p->v.MatchMapping.patterns); if (p->v.MatchMapping.rest) { if (!symtable_add_def(st, p->v.MatchMapping.rest, DEF_LOCAL, LOCATION(p))) { - 
VISIT_QUIT(st, 0); + return 0; } } break; @@ -2558,7 +2560,7 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p) VISIT(st, expr, p->v.MatchClass.cls); VISIT_SEQ(st, pattern, p->v.MatchClass.patterns); if (!check_kwd_patterns(st, p)) { - VISIT_QUIT(st, 0); + return 0; } VISIT_SEQ(st, pattern, p->v.MatchClass.kwd_patterns); break; @@ -2568,7 +2570,7 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p) } if (p->v.MatchAs.name) { if (!symtable_add_def(st, p->v.MatchAs.name, DEF_LOCAL, LOCATION(p))) { - VISIT_QUIT(st, 0); + return 0; } } break; @@ -2576,7 +2578,8 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p) VISIT_SEQ(st, pattern, p->v.MatchOr.patterns); break; } - VISIT_QUIT(st, 1); + LEAVE_RECURSIVE(st); + return 1; } static int @@ -2618,7 +2621,7 @@ symtable_visit_annotation(struct symtable *st, expr_ty annotation, void *key) _Py_block_ty current_type = parent_ste->ste_type; if (!symtable_enter_block(st, &_Py_ID(__annotate__), AnnotationBlock, key, LOCATION(annotation))) { - VISIT_QUIT(st, 0); + return 0; } parent_ste->ste_annotation_block = (struct _symtable_entry *)Py_NewRef(st->st_cur); @@ -2632,12 +2635,12 @@ symtable_visit_annotation(struct symtable *st, expr_ty annotation, void *key) } else { if (!symtable_enter_existing_block(st, parent_ste->ste_annotation_block)) { - VISIT_QUIT(st, 0); + return 0; } } VISIT(st, expr, annotation); if (!symtable_exit_block(st)) { - VISIT_QUIT(st, 0); + return 0; } return 1; } @@ -2669,7 +2672,7 @@ symtable_visit_annotations(struct symtable *st, stmt_ty o, arguments_ty a, expr_ _Py_block_ty current_type = st->st_cur->ste_type; if (!symtable_enter_block(st, &_Py_ID(__annotate__), AnnotationBlock, (void *)a, LOCATION(o))) { - VISIT_QUIT(st, 0); + return 0; } if (is_in_class || current_type == ClassBlock) { st->st_cur->ste_can_see_class_scope = 1; @@ -2696,7 +2699,7 @@ symtable_visit_annotations(struct symtable *st, stmt_ty o, arguments_ty a, expr_ VISIT(st, expr, returns); } if (!symtable_exit_block(st)) { - VISIT_QUIT(st, 0); + return 0; } return 1; } @@ -2963,7 +2966,7 @@ symtable_raise_if_comprehension_block(struct symtable *st, expr_ty e) { (type == DictComprehension) ? "'yield' inside dict comprehension" : "'yield' inside generator expression"); SET_ERROR_LOCATION(st->st_filename, LOCATION(e)); - VISIT_QUIT(st, 0); + return 0; } static int From 7a5c4103b094aaf1b65af6de65795d172cfe8fe0 Mon Sep 17 00:00:00 2001 From: Matth-M <93771840+Matth-M@users.noreply.github.com> Date: Sat, 3 Aug 2024 12:18:59 +0100 Subject: [PATCH 61/70] Doc: Improve wording of ``os.path.commonpath()`` (#122627) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- Doc/library/os.path.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index ac24bf05c289b6..ecbbc1d7605f9f 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -81,7 +81,7 @@ the :mod:`glob` module.) Return the longest common sub-path of each pathname in the iterable *paths*. Raise :exc:`ValueError` if *paths* contain both absolute - and relative pathnames, the *paths* are on the different drives or + and relative pathnames, if *paths* are on different drives, or if *paths* is empty. Unlike :func:`commonprefix`, this returns a valid path. 
From d91ac525ef166edc0083acf5a96f81b87324fe7f Mon Sep 17 00:00:00 2001 From: Sergey B Kirpichev Date: Sat, 3 Aug 2024 14:20:10 +0300 Subject: [PATCH 62/70] gh-122613: Document PyLong_GetInfo() (part of Limited API) (GH-#122280) --- Doc/c-api/long.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 42162914c0aec8..9f2c48d98b8344 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -514,6 +514,17 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionadded:: 3.14 +.. c:function:: PyObject* PyLong_GetInfo(void) + + On success, return a read only :term:`named tuple`, that holds + information about Python's internal representation of integers. + See :data:`sys.int_info` for description of individual fields. + + On failure, return ``NULL`` with an exception set. + + .. versionadded:: 3.1 + + .. c:function:: int PyUnstable_Long_IsCompact(const PyLongObject* op) Return 1 if *op* is compact, 0 otherwise. From cc6839a1810290e483e5d5f0786d9b46c4294d47 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 3 Aug 2024 12:52:21 +0100 Subject: [PATCH 63/70] GH-109408: Stop running patchcheck in CI (#109895) --- .azure-pipelines/posix-steps.yml | 6 -- .azure-pipelines/pr.yml | 17 ----- Tools/patchcheck/patchcheck.py | 120 ++++--------------------------- 3 files changed, 12 insertions(+), 131 deletions(-) diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml index e23c7b1dcb55c1..99fb7f3b1105b6 100644 --- a/.azure-pipelines/posix-steps.yml +++ b/.azure-pipelines/posix-steps.yml @@ -18,9 +18,3 @@ steps: - script: make pythoninfo displayName: 'Display build info' - -- script: | - git fetch origin - ./python Tools/patchcheck/patchcheck.py --ci true - displayName: 'Run patchcheck.py' - condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest')) diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml index 335a4b407cb83c..433396778ab891 100644 --- a/.azure-pipelines/pr.yml +++ b/.azure-pipelines/pr.yml @@ -9,20 +9,3 @@ jobs: steps: - template: ./prebuild-checks.yml - - -- job: Ubuntu_Patchcheck - displayName: Ubuntu patchcheck - dependsOn: Prebuild - condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true')) - - pool: - vmImage: ubuntu-22.04 - - variables: - testRunTitle: '$(system.pullRequest.TargetBranch)-linux' - testRunPlatform: linux - openssl_version: 1.1.1u - - steps: - - template: ./posix-steps.yml diff --git a/Tools/patchcheck/patchcheck.py b/Tools/patchcheck/patchcheck.py index fc338f389ca6d9..0dcf6ef844a048 100755 --- a/Tools/patchcheck/patchcheck.py +++ b/Tools/patchcheck/patchcheck.py @@ -5,9 +5,6 @@ import subprocess import sysconfig -import reindent -import untabify - def get_python_source_dir(): src_dir = sysconfig.get_config_var('abs_srcdir') @@ -16,13 +13,6 @@ def get_python_source_dir(): return os.path.abspath(src_dir) -# Excluded directories which are copies of external libraries: -# don't check their coding style -EXCLUDE_DIRS = [ - os.path.join('Modules', '_decimal', 'libmpdec'), - os.path.join('Modules', 'expat'), - os.path.join('Modules', 'zlib'), - ] SRCDIR = get_python_source_dir() @@ -153,62 +143,7 @@ def changed_files(base_branch=None): else: sys.exit('need a git checkout to get modified files') - filenames2 = [] - for filename in filenames: - # Normalize the path to be able to match using .startswith() - filename = os.path.normpath(filename) - if 
any(filename.startswith(path) for path in EXCLUDE_DIRS): - # Exclude the file - continue - filenames2.append(filename) - - return filenames2 - - -def report_modified_files(file_paths): - count = len(file_paths) - if count == 0: - return n_files_str(count) - else: - lines = [f"{n_files_str(count)}:"] - for path in file_paths: - lines.append(f" {path}") - return "\n".join(lines) - - -#: Python files that have tabs by design: -_PYTHON_FILES_WITH_TABS = frozenset({ - 'Tools/c-analyzer/cpython/_parser.py', -}) - - -@status("Fixing Python file whitespace", info=report_modified_files) -def normalize_whitespace(file_paths): - """Make sure that the whitespace for .py files have been normalized.""" - reindent.makebackup = False # No need to create backups. - fixed = [ - path for path in file_paths - if ( - path.endswith('.py') - and path not in _PYTHON_FILES_WITH_TABS - and reindent.check(os.path.join(SRCDIR, path)) - ) - ] - return fixed - - -@status("Fixing C file whitespace", info=report_modified_files) -def normalize_c_whitespace(file_paths): - """Report if any C files """ - fixed = [] - for path in file_paths: - abspath = os.path.join(SRCDIR, path) - with open(abspath, 'r') as f: - if '\t' not in f.read(): - continue - untabify.process(abspath, 8, verbose=False) - fixed.append(path) - return fixed + return list(map(os.path.normpath, filenames)) @status("Docs modified", modal=True) @@ -248,40 +183,14 @@ def regenerated_pyconfig_h_in(file_paths): return "not needed" -def ci(pull_request): - if pull_request == 'false': - print('Not a pull request; skipping') - return - base_branch = get_base_branch() - file_paths = changed_files(base_branch) - python_files = [fn for fn in file_paths if fn.endswith('.py')] - c_files = [fn for fn in file_paths if fn.endswith(('.c', '.h'))] - fixed = [] - fixed.extend(normalize_whitespace(python_files)) - fixed.extend(normalize_c_whitespace(c_files)) - if not fixed: - print('No whitespace issues found') - else: - count = len(fixed) - print(f'Please fix the {n_files_str(count)} with whitespace issues') - print('(on Unix you can run `make patchcheck` to make the fixes)') - sys.exit(1) - - def main(): base_branch = get_base_branch() file_paths = changed_files(base_branch) - python_files = [fn for fn in file_paths if fn.endswith('.py')] - c_files = [fn for fn in file_paths if fn.endswith(('.c', '.h'))] - doc_files = [fn for fn in file_paths if fn.startswith('Doc') and - fn.endswith(('.rst', '.inc'))] + has_doc_files = any(fn for fn in file_paths if fn.startswith('Doc') and + fn.endswith(('.rst', '.inc'))) misc_files = {p for p in file_paths if p.startswith('Misc')} - # PEP 8 whitespace rules enforcement. - normalize_whitespace(python_files) - # C rules enforcement. - normalize_c_whitespace(c_files) # Docs updated. - docs_modified(doc_files) + docs_modified(has_doc_files) # Misc/ACKS changed. credit_given(misc_files) # Misc/NEWS changed. @@ -292,19 +201,14 @@ def main(): regenerated_pyconfig_h_in(file_paths) # Test suite run and passed. - if python_files or c_files: - end = " and check for refleaks?" if c_files else "?" 
- print() - print("Did you run the test suite" + end) + has_c_files = any(fn for fn in file_paths if fn.endswith(('.c', '.h'))) + has_python_files = any(fn for fn in file_paths if fn.endswith('.py')) + print() + if has_c_files: + print("Did you run the test suite and check for refleaks?") + elif has_python_files: + print("Did you run the test suite?") if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument('--ci', - help='Perform pass/fail checks') - args = parser.parse_args() - if args.ci: - ci(args.ci) - else: - main() + main() From 50b36037518a8e7f7eee39b597d56b5b2756eb86 Mon Sep 17 00:00:00 2001 From: neonene <53406459+neonene@users.noreply.github.com> Date: Sat, 3 Aug 2024 22:15:26 +0900 Subject: [PATCH 64/70] gh-122334: Fix test_embed failure when missing _ssl module (GH-122630) Co-authored-by: Wulian233 <1055917385@qq.com> --- Lib/test/test_embed.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index ab112d6be85b46..916a9a79887dfc 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -465,8 +465,12 @@ def test_getargs_reset_static_parser(self): # Test _PyArg_Parser initializations via _PyArg_UnpackKeywords() # https://github.com/python/cpython/issues/122334 code = textwrap.dedent(""" - import _ssl - _ssl.txt2obj(txt='1.3') + try: + import _ssl + except ModuleNotFoundError: + _ssl = None + if _ssl is not None: + _ssl.txt2obj(txt='1.3') print('1') import _queue From 06eb9701a182b4720dfa8766cb41cc5a3728a8b9 Mon Sep 17 00:00:00 2001 From: scottwoodall Date: Sat, 3 Aug 2024 09:24:29 -0400 Subject: [PATCH 65/70] Doc: Grammar fix in ``library/ssl.rst``, 'Verifying certificates' (#122646) --- Doc/library/ssl.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 7d4c1f0f2de347..ad441c528d0d66 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -2710,7 +2710,7 @@ Verifying certificates When calling the :class:`SSLContext` constructor directly, :const:`CERT_NONE` is the default. Since it does not authenticate the other -peer, it can be insecure, especially in client mode where most of time you +peer, it can be insecure, especially in client mode where most of the time you would like to ensure the authenticity of the server you're talking to. Therefore, when in client mode, it is highly recommended to use :const:`CERT_REQUIRED`. However, it is in itself not sufficient; you also From 1573d90ce17f27fd30a251de897a35bf598d2655 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 3 Aug 2024 16:11:48 +0100 Subject: [PATCH 66/70] gh-109408: Remove ``.azure-pipelines/pr.yml`` (#122643) This no longer does anything useful, beyond wasting Azure resources. 
--- .azure-pipelines/posix-deps-apt.sh | 27 --------------------------- .azure-pipelines/posix-steps.yml | 20 -------------------- .azure-pipelines/pr.yml | 11 ----------- 3 files changed, 58 deletions(-) delete mode 100755 .azure-pipelines/posix-deps-apt.sh delete mode 100644 .azure-pipelines/posix-steps.yml delete mode 100644 .azure-pipelines/pr.yml diff --git a/.azure-pipelines/posix-deps-apt.sh b/.azure-pipelines/posix-deps-apt.sh deleted file mode 100755 index e0f4ca5d8d8e88..00000000000000 --- a/.azure-pipelines/posix-deps-apt.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh -apt-get update - -apt-get -yq install \ - build-essential \ - zlib1g-dev \ - libbz2-dev \ - liblzma-dev \ - libncurses5-dev \ - libreadline6-dev \ - libsqlite3-dev \ - libssl-dev \ - libgdbm-dev \ - tk-dev \ - lzma \ - lzma-dev \ - liblzma-dev \ - libffi-dev \ - uuid-dev \ - xvfb - -if [ ! -z "$1" ] -then - echo ##vso[task.prependpath]$PWD/multissl/openssl/$1 - echo ##vso[task.setvariable variable=OPENSSL_DIR]$PWD/multissl/openssl/$1 - python3 Tools/ssl/multissltests.py --steps=library --base-directory $PWD/multissl --openssl $1 --system Linux -fi diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml deleted file mode 100644 index 99fb7f3b1105b6..00000000000000 --- a/.azure-pipelines/posix-steps.yml +++ /dev/null @@ -1,20 +0,0 @@ -steps: -- checkout: self - clean: true - fetchDepth: 5 - -# Work around a known issue affecting Ubuntu VMs on Pipelines -- script: sudo setfacl -Rb /home/vsts - displayName: 'Workaround ACL issue' - -- script: sudo ./.azure-pipelines/posix-deps-apt.sh $(openssl_version) - displayName: 'Install dependencies' - -- script: ./configure --with-pydebug - displayName: 'Configure CPython (debug)' - -- script: make -j4 - displayName: 'Build CPython' - -- script: make pythoninfo - displayName: 'Display build info' diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml deleted file mode 100644 index 433396778ab891..00000000000000 --- a/.azure-pipelines/pr.yml +++ /dev/null @@ -1,11 +0,0 @@ -pr: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7'] - -jobs: -- job: Prebuild - displayName: Pre-build checks - - pool: - vmImage: ubuntu-22.04 - - steps: - - template: ./prebuild-checks.yml From 95f5c89b545beaafad73f05a695742da3e90bc41 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 3 Aug 2024 17:41:26 +0100 Subject: [PATCH 67/70] GH-121970: Fix ``gettext`` for audit events (#122651) --- Doc/tools/extensions/audit_events.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Doc/tools/extensions/audit_events.py b/Doc/tools/extensions/audit_events.py index d0f08522d21ea2..23d82c0f4414bf 100644 --- a/Doc/tools/extensions/audit_events.py +++ b/Doc/tools/extensions/audit_events.py @@ -149,6 +149,7 @@ def run(self) -> list[nodes.paragraph]: node = nodes.paragraph("", classes=["audit-hook"], ids=ids) self.set_source_info(node) if self.content: + node.rawsource = '\n'.join(self.content) # for gettext self.state.nested_parse(self.content, self.content_offset, node) else: num_args = min(2, len(args)) @@ -156,6 +157,7 @@ def run(self) -> list[nodes.paragraph]: name=f"``{name}``", args=", ".join(f"``{a}``" for a in args), ) + node.rawsource = text # for gettext parsed, messages = self.state.inline_text(text, self.lineno) node += parsed node += messages From 151934a324789c58cca9c7bbd6753d735454df5a Mon Sep 17 00:00:00 2001 From: sobolevn Date: Sun, 4 Aug 2024 00:55:47 +0300 Subject: [PATCH 68/70] gh-122623: Improve 
`c-api/bytearray.rst` with error handling info (#122624) --- Doc/c-api/bytearray.rst | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Doc/c-api/bytearray.rst b/Doc/c-api/bytearray.rst index 456f7d89bca03c..9045689a6be567 100644 --- a/Doc/c-api/bytearray.rst +++ b/Doc/c-api/bytearray.rst @@ -42,17 +42,22 @@ Direct API functions Return a new bytearray object from any object, *o*, that implements the :ref:`buffer protocol `. + On failure, return ``NULL`` with an exception set. + .. c:function:: PyObject* PyByteArray_FromStringAndSize(const char *string, Py_ssize_t len) - Create a new bytearray object from *string* and its length, *len*. On - failure, ``NULL`` is returned. + Create a new bytearray object from *string* and its length, *len*. + + On failure, return ``NULL`` with an exception set. .. c:function:: PyObject* PyByteArray_Concat(PyObject *a, PyObject *b) Concat bytearrays *a* and *b* and return a new bytearray with the result. + On failure, return ``NULL`` with an exception set. + .. c:function:: Py_ssize_t PyByteArray_Size(PyObject *bytearray) From 3462a80d2cf37a63fe43f46f64a8c9823f84531d Mon Sep 17 00:00:00 2001 From: Sergey B Kirpichev Date: Sun, 4 Aug 2024 11:53:17 +0300 Subject: [PATCH 69/70] gh-121889: cmath.acosh(0+nanj) returns nan+pi/2j (#121892) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit As per C11 DR#471 (adjusted resolution accepted for C17), cacosh (0 + iNaN) should return NaN ± i pi/2, not NaN + iNaN. This patch fixes cmath's code to do same. --- Lib/test/mathdata/cmath_testcases.txt | 4 ++-- .../Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst | 1 + Modules/cmathmodule.c | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst diff --git a/Lib/test/mathdata/cmath_testcases.txt b/Lib/test/mathdata/cmath_testcases.txt index 0165e17634f41c..0c0d2d703f07f8 100644 --- a/Lib/test/mathdata/cmath_testcases.txt +++ b/Lib/test/mathdata/cmath_testcases.txt @@ -371,9 +371,9 @@ acosh1002 acosh 0.0 inf -> inf 1.5707963267948966 acosh1003 acosh 2.3 inf -> inf 1.5707963267948966 acosh1004 acosh -0.0 inf -> inf 1.5707963267948966 acosh1005 acosh -2.3 inf -> inf 1.5707963267948966 -acosh1006 acosh 0.0 nan -> nan nan +acosh1006 acosh 0.0 nan -> nan 1.5707963267948966 ignore-imag-sign acosh1007 acosh 2.3 nan -> nan nan -acosh1008 acosh -0.0 nan -> nan nan +acosh1008 acosh -0.0 nan -> nan 1.5707963267948966 ignore-imag-sign acosh1009 acosh -2.3 nan -> nan nan acosh1010 acosh -inf 0.0 -> inf 3.1415926535897931 acosh1011 acosh -inf 2.3 -> inf 3.1415926535897931 diff --git a/Misc/NEWS.d/next/Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst b/Misc/NEWS.d/next/Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst new file mode 100644 index 00000000000000..a7babe0580b3e4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst @@ -0,0 +1 @@ +Adjusts ``cmath.acosh(complex('0+nanj'))`` for recent C standards. 
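The effect of the special-value table change below can be sketched at the Python level roughly as follows, assuming an interpreter built with this fix; the values noted in the comments are what the updated test cases above expect:

    import cmath

    nan = float("nan")

    # acosh of a +/-0 real part with a nan imaginary part: the real part is
    # nan, the imaginary part is +/-pi/2 (its sign is ignored by the tests).
    print(cmath.acosh(complex(0.0, nan)))    # expected: nan real part, ~1.5707963 imaginary part
    print(cmath.acosh(complex(-0.0, nan)))   # expected: nan real part, ~1.5707963 imaginary part

    # Finite nonzero real parts keep the old behaviour: both parts stay nan.
    print(cmath.acosh(complex(2.3, nan)))    # expected: nan+nanj
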
diff --git a/Modules/cmathmodule.c b/Modules/cmathmodule.c index bf86a211bcb188..16ac663bdb9949 100644 --- a/Modules/cmathmodule.c +++ b/Modules/cmathmodule.c @@ -1259,8 +1259,8 @@ cmath_exec(PyObject *mod) INIT_SPECIAL_VALUES(acosh_special_values, { C(INF,-P34) C(INF,-P) C(INF,-P) C(INF,P) C(INF,P) C(INF,P34) C(INF,N) C(INF,-P12) C(U,U) C(U,U) C(U,U) C(U,U) C(INF,P12) C(N,N) - C(INF,-P12) C(U,U) C(0.,-P12) C(0.,P12) C(U,U) C(INF,P12) C(N,N) - C(INF,-P12) C(U,U) C(0.,-P12) C(0.,P12) C(U,U) C(INF,P12) C(N,N) + C(INF,-P12) C(U,U) C(0.,-P12) C(0.,P12) C(U,U) C(INF,P12) C(N,P12) + C(INF,-P12) C(U,U) C(0.,-P12) C(0.,P12) C(U,U) C(INF,P12) C(N,P12) C(INF,-P12) C(U,U) C(U,U) C(U,U) C(U,U) C(INF,P12) C(N,N) C(INF,-P14) C(INF,-0.) C(INF,-0.) C(INF,0.) C(INF,0.) C(INF,P14) C(INF,N) C(INF,N) C(N,N) C(N,N) C(N,N) C(N,N) C(INF,N) C(N,N) From e6fad7a0e3d824f4a3c9cd71a48208880606d705 Mon Sep 17 00:00:00 2001 From: Sergey B Kirpichev Date: Sun, 4 Aug 2024 12:05:30 +0300 Subject: [PATCH 70/70] =?UTF-8?q?gh-122637:=20fix=20tanh(=C2=B10+infj)=20a?= =?UTF-8?q?nd=20tanh(=C2=B10+nanj)=20to=20return=20=C2=B10+nanj=20(#122638?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit As per C11 DR#471, ctanh (0 + i NaN) and ctanh (0 + i Inf) should return 0 + i NaN (with "invalid" exception in the second case). This has corresponding implications for ctan(z), as its errors and special cases are handled as if the operation is implemented by -i*ctanh(i*z). This patch fixes cmath's code to do same. Glibs patch: https://sourceware.org/git/?p=glibc.git;a=commitdiff;h=d15e83c5f5231d971472b5ffc9219d54056ca0f1 --- Lib/test/mathdata/cmath_testcases.txt | 24 +++++++++---------- ...-08-03-06-51-08.gh-issue-122637.gpas8J.rst | 1 + Modules/cmathmodule.c | 4 ++-- 3 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2024-08-03-06-51-08.gh-issue-122637.gpas8J.rst diff --git a/Lib/test/mathdata/cmath_testcases.txt b/Lib/test/mathdata/cmath_testcases.txt index 0c0d2d703f07f8..7b98b5a2998413 100644 --- a/Lib/test/mathdata/cmath_testcases.txt +++ b/Lib/test/mathdata/cmath_testcases.txt @@ -1992,9 +1992,9 @@ tanh0065 tanh 1.797e+308 0.0 -> 1.0 0.0 --special values tanh1000 tanh 0.0 0.0 -> 0.0 0.0 -tanh1001 tanh 0.0 inf -> nan nan invalid +tanh1001 tanh 0.0 inf -> 0.0 nan invalid tanh1002 tanh 2.3 inf -> nan nan invalid -tanh1003 tanh 0.0 nan -> nan nan +tanh1003 tanh 0.0 nan -> 0.0 nan tanh1004 tanh 2.3 nan -> nan nan tanh1005 tanh inf 0.0 -> 1.0 0.0 tanh1006 tanh inf 0.7 -> 1.0 0.0 @@ -2009,7 +2009,7 @@ tanh1014 tanh nan 2.3 -> nan nan tanh1015 tanh nan inf -> nan nan tanh1016 tanh nan nan -> nan nan tanh1017 tanh 0.0 -0.0 -> 0.0 -0.0 -tanh1018 tanh 0.0 -inf -> nan nan invalid +tanh1018 tanh 0.0 -inf -> 0.0 nan invalid tanh1019 tanh 2.3 -inf -> nan nan invalid tanh1020 tanh inf -0.0 -> 1.0 -0.0 tanh1021 tanh inf -0.7 -> 1.0 -0.0 @@ -2022,9 +2022,9 @@ tanh1027 tanh nan -0.0 -> nan -0.0 tanh1028 tanh nan -2.3 -> nan nan tanh1029 tanh nan -inf -> nan nan tanh1030 tanh -0.0 -0.0 -> -0.0 -0.0 -tanh1031 tanh -0.0 -inf -> nan nan invalid +tanh1031 tanh -0.0 -inf -> -0.0 nan invalid tanh1032 tanh -2.3 -inf -> nan nan invalid -tanh1033 tanh -0.0 nan -> nan nan +tanh1033 tanh -0.0 nan -> -0.0 nan tanh1034 tanh -2.3 nan -> nan nan tanh1035 tanh -inf -0.0 -> -1.0 -0.0 tanh1036 tanh -inf -0.7 -> -1.0 -0.0 @@ -2035,7 +2035,7 @@ tanh1040 tanh -inf -3.5 -> -1.0 -0.0 tanh1041 tanh -inf -inf -> -1.0 0.0 ignore-imag-sign tanh1042 tanh -inf nan -> -1.0 0.0 
ignore-imag-sign tanh1043 tanh -0.0 0.0 -> -0.0 0.0 -tanh1044 tanh -0.0 inf -> nan nan invalid +tanh1044 tanh -0.0 inf -> -0.0 nan invalid tanh1045 tanh -2.3 inf -> nan nan invalid tanh1046 tanh -inf 0.0 -> -1.0 0.0 tanh1047 tanh -inf 0.7 -> -1.0 0.0 @@ -2307,9 +2307,9 @@ tan0066 tan -8.79645943005142 0.0 -> 0.7265425280053614098 0.0 -- special values tan1000 tan -0.0 0.0 -> -0.0 0.0 -tan1001 tan -inf 0.0 -> nan nan invalid +tan1001 tan -inf 0.0 -> nan 0.0 invalid tan1002 tan -inf 2.2999999999999998 -> nan nan invalid -tan1003 tan nan 0.0 -> nan nan +tan1003 tan nan 0.0 -> nan 0.0 tan1004 tan nan 2.2999999999999998 -> nan nan tan1005 tan -0.0 inf -> -0.0 1.0 tan1006 tan -0.69999999999999996 inf -> -0.0 1.0 @@ -2324,7 +2324,7 @@ tan1014 tan -2.2999999999999998 nan -> nan nan tan1015 tan -inf nan -> nan nan tan1016 tan nan nan -> nan nan tan1017 tan 0.0 0.0 -> 0.0 0.0 -tan1018 tan inf 0.0 -> nan nan invalid +tan1018 tan inf 0.0 -> nan 0.0 invalid tan1019 tan inf 2.2999999999999998 -> nan nan invalid tan1020 tan 0.0 inf -> 0.0 1.0 tan1021 tan 0.69999999999999996 inf -> 0.0 1.0 @@ -2337,9 +2337,9 @@ tan1027 tan 0.0 nan -> 0.0 nan tan1028 tan 2.2999999999999998 nan -> nan nan tan1029 tan inf nan -> nan nan tan1030 tan 0.0 -0.0 -> 0.0 -0.0 -tan1031 tan inf -0.0 -> nan nan invalid +tan1031 tan inf -0.0 -> nan -0.0 invalid tan1032 tan inf -2.2999999999999998 -> nan nan invalid -tan1033 tan nan -0.0 -> nan nan +tan1033 tan nan -0.0 -> nan -0.0 tan1034 tan nan -2.2999999999999998 -> nan nan tan1035 tan 0.0 -inf -> 0.0 -1.0 tan1036 tan 0.69999999999999996 -inf -> 0.0 -1.0 @@ -2350,7 +2350,7 @@ tan1040 tan 3.5 -inf -> 0.0 -1.0 tan1041 tan inf -inf -> -0.0 -1.0 ignore-real-sign tan1042 tan nan -inf -> -0.0 -1.0 ignore-real-sign tan1043 tan -0.0 -0.0 -> -0.0 -0.0 -tan1044 tan -inf -0.0 -> nan nan invalid +tan1044 tan -inf -0.0 -> nan -0.0 invalid tan1045 tan -inf -2.2999999999999998 -> nan nan invalid tan1046 tan -0.0 -inf -> -0.0 -1.0 tan1047 tan -0.69999999999999996 -inf -> -0.0 -1.0 diff --git a/Misc/NEWS.d/next/Library/2024-08-03-06-51-08.gh-issue-122637.gpas8J.rst b/Misc/NEWS.d/next/Library/2024-08-03-06-51-08.gh-issue-122637.gpas8J.rst new file mode 100644 index 00000000000000..2ded33d75b35bd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-08-03-06-51-08.gh-issue-122637.gpas8J.rst @@ -0,0 +1 @@ +Adjust ``cmath.tanh(nanj)`` and ``cmath.tanh(infj)`` for recent C standards. diff --git a/Modules/cmathmodule.c b/Modules/cmathmodule.c index 16ac663bdb9949..3c7f0bb6453ef0 100644 --- a/Modules/cmathmodule.c +++ b/Modules/cmathmodule.c @@ -1339,8 +1339,8 @@ cmath_exec(PyObject *mod) INIT_SPECIAL_VALUES(tanh_special_values, { C(-1.,0.) C(U,U) C(-1.,-0.) C(-1.,0.) C(U,U) C(-1.,0.) C(-1.,0.) C(N,N) C(U,U) C(U,U) C(U,U) C(U,U) C(N,N) C(N,N) - C(N,N) C(U,U) C(-0.,-0.) C(-0.,0.) C(U,U) C(N,N) C(N,N) - C(N,N) C(U,U) C(0.,-0.) C(0.,0.) C(U,U) C(N,N) C(N,N) + C(-0.0,N) C(U,U) C(-0.,-0.) C(-0.,0.) C(U,U) C(-0.0,N) C(-0.,N) + C(0.0,N) C(U,U) C(0.,-0.) C(0.,0.) C(U,U) C(0.0,N) C(0.,N) C(N,N) C(U,U) C(U,U) C(U,U) C(U,U) C(N,N) C(N,N) C(1.,0.) C(U,U) C(1.,-0.) C(1.,0.) C(U,U) C(1.,0.) C(1.,0.) C(N,N) C(N,N) C(N,-0.) C(N,0.) C(N,N) C(N,N) C(N,N)
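
A rough Python-level sketch of the tanh/tan special cases touched above, assuming a build that includes this fix; the results noted in the comments are what the updated test cases expect:

    import cmath

    nan = float("nan")
    inf = float("inf")

    # Entries whose expected results change in the test file above
    # (previously both components came back as nan):
    print(cmath.tanh(complex(0.0, nan)))    # expected: 0.0 real part, nan imaginary part
    print(cmath.tanh(complex(-0.0, nan)))   # expected: -0.0 real part, nan imaginary part
    print(cmath.tan(complex(nan, 0.0)))     # expected: nan real part, 0.0 imaginary part

    # The infinite-imaginary cases keep their "invalid" flag, which the
    # cmath wrappers are expected to report as ValueError:
    try:
        cmath.tanh(complex(0.0, inf))
    except ValueError as exc:
        print("still invalid:", exc)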