diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3422ef835279bc..d40519e40d3cc2 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -7,6 +7,9 @@ # GitHub .github/** @ezio-melotti @hugovk +# pre-commit +.pre-commit-config.yaml @hugovk @AlexWaygood + # Build system configure* @erlend-aasland @corona10 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000000000..4481ea80bfd936 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,22 @@ +name: Lint + +on: [push, pull_request, workflow_dispatch] + +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + lint: + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.x" + - uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/require-pr-label.yml b/.github/workflows/require-pr-label.yml index 916bbeb4352734..88aaea039f04f4 100644 --- a/.github/workflows/require-pr-label.yml +++ b/.github/workflows/require-pr-label.yml @@ -4,6 +4,10 @@ on: pull_request: types: [opened, reopened, labeled, unlabeled, synchronize] +permissions: + issues: read + pull-requests: read + jobs: label: name: DO-NOT-MERGE / unresolved review diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000000000..808622f19a3dbf --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,7 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-yaml + - id: trailing-whitespace + types_or: [c, python, rst] diff --git a/Doc/c-api/bytearray.rst b/Doc/c-api/bytearray.rst index 4bf3cfe100cd01..456f7d89bca03c 100644 --- a/Doc/c-api/bytearray.rst +++ b/Doc/c-api/bytearray.rst @@ -5,7 +5,7 @@ Byte Array Objects ------------------ -.. index:: object: bytearray +.. index:: pair: object; bytearray .. c:type:: PyByteArrayObject diff --git a/Doc/c-api/bytes.rst b/Doc/c-api/bytes.rst index d62962cab45f6b..9f48f2ffafe170 100644 --- a/Doc/c-api/bytes.rst +++ b/Doc/c-api/bytes.rst @@ -8,7 +8,7 @@ Bytes Objects These functions raise :exc:`TypeError` when expecting a bytes parameter and called with a non-bytes parameter. -.. index:: object: bytes +.. index:: pair: object; bytes .. c:type:: PyBytesObject diff --git a/Doc/c-api/capsule.rst b/Doc/c-api/capsule.rst index 1c8f432505ef68..427ed959c58568 100644 --- a/Doc/c-api/capsule.rst +++ b/Doc/c-api/capsule.rst @@ -5,7 +5,7 @@ Capsules -------- -.. index:: object: Capsule +.. index:: pair: object; Capsule Refer to :ref:`using-capsules` for more information on using these objects. diff --git a/Doc/c-api/complex.rst b/Doc/c-api/complex.rst index 9228ce85200023..344da903da4c1a 100644 --- a/Doc/c-api/complex.rst +++ b/Doc/c-api/complex.rst @@ -5,7 +5,7 @@ Complex Number Objects ---------------------- -.. index:: object: complex number +.. index:: pair: object; complex number Python's complex number objects are implemented as two distinct types when viewed from the C API: one is the Python object exposed to Python programs, and diff --git a/Doc/c-api/concrete.rst b/Doc/c-api/concrete.rst index 8d3124a12fa9d2..880f7b15ce68e8 100644 --- a/Doc/c-api/concrete.rst +++ b/Doc/c-api/concrete.rst @@ -40,7 +40,7 @@ This section describes Python type objects and the singleton object ``None``. Numeric Objects =============== -.. index:: object: numeric +.. index:: pair: object; numeric .. 
toctree:: @@ -55,7 +55,7 @@ Numeric Objects Sequence Objects ================ -.. index:: object: sequence +.. index:: pair: object; sequence Generic operations on sequence objects were discussed in the previous chapter; this section deals with the specific kinds of sequence objects that are @@ -77,7 +77,7 @@ intrinsic to the Python language. Container Objects ================= -.. index:: object: mapping +.. index:: pair: object; mapping .. toctree:: diff --git a/Doc/c-api/dict.rst b/Doc/c-api/dict.rst index b9f84cea785644..0ca8ad624b2034 100644 --- a/Doc/c-api/dict.rst +++ b/Doc/c-api/dict.rst @@ -5,7 +5,7 @@ Dictionary Objects ------------------ -.. index:: object: dictionary +.. index:: pair: object; dictionary .. c:type:: PyDictObject @@ -154,7 +154,7 @@ Dictionary Objects .. c:function:: Py_ssize_t PyDict_Size(PyObject *p) - .. index:: builtin: len + .. index:: pair: built-in function; len Return the number of items in the dictionary. This is equivalent to ``len(p)`` on a dictionary. diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst index 49d2f18d4573b0..4ed96f01dbbc3e 100644 --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -602,7 +602,7 @@ Signal Handling .. c:function:: int PyErr_CheckSignals() .. index:: - module: signal + pair: module; signal single: SIGINT single: KeyboardInterrupt (built-in exception) @@ -633,7 +633,7 @@ Signal Handling .. c:function:: void PyErr_SetInterrupt() .. index:: - module: signal + pair: module; signal single: SIGINT single: KeyboardInterrupt (built-in exception) @@ -648,7 +648,7 @@ Signal Handling .. c:function:: int PyErr_SetInterruptEx(int signum) .. index:: - module: signal + pair: module; signal single: KeyboardInterrupt (built-in exception) Simulate the effect of a signal arriving. The next time diff --git a/Doc/c-api/file.rst b/Doc/c-api/file.rst index 58ed58e5466859..f32ecba9f27029 100644 --- a/Doc/c-api/file.rst +++ b/Doc/c-api/file.rst @@ -5,7 +5,7 @@ File Objects ------------ -.. index:: object: file +.. index:: pair: object; file These APIs are a minimal emulation of the Python 2 C API for built-in file objects, which used to rely on the buffered I/O (:c:expr:`FILE*`) support diff --git a/Doc/c-api/float.rst b/Doc/c-api/float.rst index 023b12c20b7c83..05b2d100d575cb 100644 --- a/Doc/c-api/float.rst +++ b/Doc/c-api/float.rst @@ -5,7 +5,7 @@ Floating Point Objects ---------------------- -.. index:: object: floating point +.. index:: pair: object; floating point .. c:type:: PyFloatObject diff --git a/Doc/c-api/function.rst b/Doc/c-api/function.rst index 947ed70404081b..5857dba82c11c6 100644 --- a/Doc/c-api/function.rst +++ b/Doc/c-api/function.rst @@ -5,7 +5,7 @@ Function Objects ---------------- -.. index:: object: function +.. index:: pair: object; function There are a few functions specific to Python functions. diff --git a/Doc/c-api/gcsupport.rst b/Doc/c-api/gcsupport.rst index cb5d64a50487fe..c3260a21bc7f8b 100644 --- a/Doc/c-api/gcsupport.rst +++ b/Doc/c-api/gcsupport.rst @@ -59,12 +59,31 @@ rules: Analogous to :c:func:`PyObject_New` but for container objects with the :const:`Py_TPFLAGS_HAVE_GC` flag set. - .. c:function:: TYPE* PyObject_GC_NewVar(TYPE, PyTypeObject *type, Py_ssize_t size) Analogous to :c:func:`PyObject_NewVar` but for container objects with the :const:`Py_TPFLAGS_HAVE_GC` flag set. +.. 
c:function:: PyObject* PyUnstable_Object_GC_NewWithExtraData(PyTypeObject *type, size_t extra_size) + + Analogous to :c:func:`PyObject_GC_New` but allocates *extra_size* + bytes at the end of the object (at offset + :c:member:`~PyTypeObject.tp_basicsize`). + The allocated memory is initialized to zeros, + except for the :c:type:`Python object header `. + + The extra data will be deallocated with the object, but otherwise it is + not managed by Python. + + .. warning:: + The function is marked as unstable because the final mechanism + for reserving extra data after an instance is not yet decided. + For allocating a variable number of fields, prefer using + :c:type:`PyVarObject` and :c:member:`~PyTypeObject.tp_itemsize` + instead. + + .. versionadded:: 3.12 + .. c:function:: TYPE* PyObject_GC_Resize(TYPE, PyVarObject *op, Py_ssize_t newsize) diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst index 474a64800044d0..79843ba521ab93 100644 --- a/Doc/c-api/import.rst +++ b/Doc/c-api/import.rst @@ -41,7 +41,7 @@ Importing Modules .. c:function:: PyObject* PyImport_ImportModuleEx(const char *name, PyObject *globals, PyObject *locals, PyObject *fromlist) - .. index:: builtin: __import__ + .. index:: pair: built-in function; __import__ Import a module. This is best described by referring to the built-in Python function :func:`__import__`. @@ -120,7 +120,7 @@ Importing Modules .. c:function:: PyObject* PyImport_ExecCodeModule(const char *name, PyObject *co) - .. index:: builtin: compile + .. index:: pair: built-in function; compile Given a module name (possibly of the form ``package.module``) and a code object read from a Python bytecode file or obtained from the built-in function @@ -186,10 +186,10 @@ Importing Modules .. versionadded:: 3.2 .. versionchanged:: 3.3 - Uses :func:`imp.source_from_cache()` in calculating the source path if + Uses :func:`!imp.source_from_cache()` in calculating the source path if only the bytecode path is provided. .. versionchanged:: 3.12 - No longer uses the removed ``imp`` module. + No longer uses the removed :mod:`!imp` module. .. c:function:: long PyImport_GetMagicNumber() diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 38e324fb6409bc..26762969ef8eba 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -336,9 +336,9 @@ Initializing and finalizing the interpreter single: PyEval_InitThreads() single: modules (in module sys) single: path (in module sys) - module: builtins - module: __main__ - module: sys + pair: module; builtins + pair: module; __main__ + pair: module; sys triple: module; search; path single: PySys_SetArgv() single: PySys_SetArgvEx() @@ -1051,7 +1051,7 @@ code, or when embedding the Python interpreter: .. deprecated:: 3.9 - .. index:: module: _thread + .. index:: pair: module; _thread .. c:function:: int PyEval_ThreadsInitialized() @@ -1494,9 +1494,9 @@ function. You can create and destroy them using the following functions: .. c:function:: PyThreadState* Py_NewInterpreter() .. index:: - module: builtins - module: __main__ - module: sys + pair: module; builtins + pair: module; __main__ + pair: module; sys single: stdout (in module sys) single: stderr (in module sys) single: stdin (in module sys) diff --git a/Doc/c-api/intro.rst b/Doc/c-api/intro.rst index acd4e033dfbc4b..8de76e55cd0586 100644 --- a/Doc/c-api/intro.rst +++ b/Doc/c-api/intro.rst @@ -261,7 +261,7 @@ complete listing. Objects, Types and Reference Counts =================================== -.. index:: object: type +.. 
index:: pair: object; type Most Python/C API functions have one or more arguments as well as a return value of type :c:expr:`PyObject*`. This type is a pointer to an opaque data type @@ -705,9 +705,9 @@ interpreter can only be used after the interpreter has been initialized. .. index:: single: Py_Initialize() - module: builtins - module: __main__ - module: sys + pair: module; builtins + pair: module; __main__ + pair: module; sys triple: module; search; path single: path (in module sys) diff --git a/Doc/c-api/list.rst b/Doc/c-api/list.rst index f9e65354a259f4..dbf35611eccd3e 100644 --- a/Doc/c-api/list.rst +++ b/Doc/c-api/list.rst @@ -5,7 +5,7 @@ List Objects ------------ -.. index:: object: list +.. index:: pair: object; list .. c:type:: PyListObject @@ -45,7 +45,7 @@ List Objects .. c:function:: Py_ssize_t PyList_Size(PyObject *list) - .. index:: builtin: len + .. index:: pair: built-in function; len Return the length of the list object in *list*; this is equivalent to ``len(list)`` on a list object. @@ -138,7 +138,7 @@ List Objects .. c:function:: PyObject* PyList_AsTuple(PyObject *list) - .. index:: builtin: tuple + .. index:: pair: built-in function; tuple Return a new tuple object containing the contents of *list*; equivalent to ``tuple(list)``. diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 41b5632d23003f..4a71c89ad85d31 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -5,8 +5,8 @@ Integer Objects --------------- -.. index:: object: long integer - object: integer +.. index:: pair: object; long integer + pair: object; integer All integers are implemented as "long" integer objects of arbitrary size. diff --git a/Doc/c-api/mapping.rst b/Doc/c-api/mapping.rst index 3c9d282c6d0ab0..cffb0ed50fb77d 100644 --- a/Doc/c-api/mapping.rst +++ b/Doc/c-api/mapping.rst @@ -20,7 +20,7 @@ See also :c:func:`PyObject_GetItem`, :c:func:`PyObject_SetItem` and .. c:function:: Py_ssize_t PyMapping_Size(PyObject *o) Py_ssize_t PyMapping_Length(PyObject *o) - .. index:: builtin: len + .. index:: pair: built-in function; len Returns the number of keys in object *o* on success, and ``-1`` on failure. This is equivalent to the Python expression ``len(o)``. diff --git a/Doc/c-api/memoryview.rst b/Doc/c-api/memoryview.rst index ebd5c7760437bf..2aa43318e7a455 100644 --- a/Doc/c-api/memoryview.rst +++ b/Doc/c-api/memoryview.rst @@ -3,7 +3,7 @@ .. _memoryview-objects: .. index:: - object: memoryview + pair: object; memoryview MemoryView objects ------------------ diff --git a/Doc/c-api/method.rst b/Doc/c-api/method.rst index 6e7e1e21aa93f2..93ad30cd4f7a8d 100644 --- a/Doc/c-api/method.rst +++ b/Doc/c-api/method.rst @@ -5,7 +5,7 @@ Instance Method Objects ----------------------- -.. index:: object: instancemethod +.. index:: pair: object; instancemethod An instance method is a wrapper for a :c:data:`PyCFunction` and the new way to bind a :c:data:`PyCFunction` to a class object. It replaces the former call @@ -47,7 +47,7 @@ to bind a :c:data:`PyCFunction` to a class object. It replaces the former call Method Objects -------------- -.. index:: object: method +.. index:: pair: object; method Methods are bound function objects. Methods are always bound to an instance of a user-defined class. Unbound methods (methods bound to a class object) are diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index c0351c8a6c72aa..230b471d473be7 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -5,7 +5,7 @@ Module Objects -------------- -.. index:: object: module +.. 
index:: pair: object; module .. c:var:: PyTypeObject PyModule_Type diff --git a/Doc/c-api/none.rst b/Doc/c-api/none.rst index 26d2b7aab201ba..b84a16a28ead56 100644 --- a/Doc/c-api/none.rst +++ b/Doc/c-api/none.rst @@ -5,7 +5,7 @@ The ``None`` Object ------------------- -.. index:: object: None +.. index:: pair: object; None Note that the :c:type:`PyTypeObject` for ``None`` is not directly exposed in the Python/C API. Since ``None`` is a singleton, testing for object identity (using diff --git a/Doc/c-api/number.rst b/Doc/c-api/number.rst index 70b91f8c2d0ca1..13d3c5af956905 100644 --- a/Doc/c-api/number.rst +++ b/Doc/c-api/number.rst @@ -64,7 +64,7 @@ Number Protocol .. c:function:: PyObject* PyNumber_Divmod(PyObject *o1, PyObject *o2) - .. index:: builtin: divmod + .. index:: pair: built-in function; divmod See the built-in function :func:`divmod`. Returns ``NULL`` on failure. This is the equivalent of the Python expression ``divmod(o1, o2)``. @@ -72,7 +72,7 @@ Number Protocol .. c:function:: PyObject* PyNumber_Power(PyObject *o1, PyObject *o2, PyObject *o3) - .. index:: builtin: pow + .. index:: pair: built-in function; pow See the built-in function :func:`pow`. Returns ``NULL`` on failure. This is the equivalent of the Python expression ``pow(o1, o2, o3)``, where *o3* is optional. @@ -94,7 +94,7 @@ Number Protocol .. c:function:: PyObject* PyNumber_Absolute(PyObject *o) - .. index:: builtin: abs + .. index:: pair: built-in function; abs Returns the absolute value of *o*, or ``NULL`` on failure. This is the equivalent of the Python expression ``abs(o)``. @@ -192,7 +192,7 @@ Number Protocol .. c:function:: PyObject* PyNumber_InPlacePower(PyObject *o1, PyObject *o2, PyObject *o3) - .. index:: builtin: pow + .. index:: pair: built-in function; pow See the built-in function :func:`pow`. Returns ``NULL`` on failure. The operation is done *in-place* when *o1* supports it. This is the equivalent of the Python @@ -238,7 +238,7 @@ Number Protocol .. c:function:: PyObject* PyNumber_Long(PyObject *o) - .. index:: builtin: int + .. index:: pair: built-in function; int Returns the *o* converted to an integer object on success, or ``NULL`` on failure. This is the equivalent of the Python expression ``int(o)``. @@ -246,7 +246,7 @@ Number Protocol .. c:function:: PyObject* PyNumber_Float(PyObject *o) - .. index:: builtin: float + .. index:: pair: built-in function; float Returns the *o* converted to a float object on success, or ``NULL`` on failure. This is the equivalent of the Python expression ``float(o)``. diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 0a12bb9e8c54f0..a25ff244c9f07c 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -190,7 +190,7 @@ Object Protocol .. c:function:: PyObject* PyObject_Repr(PyObject *o) - .. index:: builtin: repr + .. index:: pair: built-in function; repr Compute a string representation of object *o*. Returns the string representation on success, ``NULL`` on failure. This is the equivalent of the @@ -202,7 +202,7 @@ Object Protocol .. c:function:: PyObject* PyObject_ASCII(PyObject *o) - .. index:: builtin: ascii + .. index:: pair: built-in function; ascii As :c:func:`PyObject_Repr`, compute a string representation of object *o*, but escape the non-ASCII characters in the string returned by @@ -227,7 +227,7 @@ Object Protocol .. c:function:: PyObject* PyObject_Bytes(PyObject *o) - .. index:: builtin: bytes + .. index:: pair: built-in function; bytes Compute a bytes representation of object *o*. 
``NULL`` is returned on failure and a bytes object on success. This is equivalent to the Python @@ -278,7 +278,7 @@ Object Protocol .. c:function:: Py_hash_t PyObject_Hash(PyObject *o) - .. index:: builtin: hash + .. index:: pair: built-in function; hash Compute and return the hash value of an object *o*. On failure, return ``-1``. This is the equivalent of the Python expression ``hash(o)``. @@ -312,7 +312,7 @@ Object Protocol .. c:function:: PyObject* PyObject_Type(PyObject *o) - .. index:: builtin: type + .. index:: pair: built-in function; type When *o* is non-``NULL``, returns a type object corresponding to the object type of object *o*. On failure, raises :exc:`SystemError` and returns ``NULL``. This @@ -332,7 +332,7 @@ Object Protocol .. c:function:: Py_ssize_t PyObject_Size(PyObject *o) Py_ssize_t PyObject_Length(PyObject *o) - .. index:: builtin: len + .. index:: pair: built-in function; len Return the length of object *o*. If the object *o* provides either the sequence and mapping protocols, the sequence length is returned. On error, ``-1`` is @@ -395,3 +395,42 @@ Object Protocol returns ``NULL`` if the object cannot be iterated. .. versionadded:: 3.10 + +.. c:function:: void *PyObject_GetTypeData(PyObject *o, PyTypeObject *cls) + + Get a pointer to subclass-specific data reserved for *cls*. + + The object *o* must be an instance of *cls*, and *cls* must have been + created using negative :c:member:`PyType_Spec.basicsize`. + Python does not check this. + + On error, set an exception and return ``NULL``. + + .. versionadded:: 3.12 + +.. c:function:: Py_ssize_t PyType_GetTypeDataSize(PyTypeObject *cls) + + Return the size of the instance memory space reserved for *cls*, i.e. the size of the + memory :c:func:`PyObject_GetTypeData` returns. + + This may be larger than requested using :c:member:`-PyType_Spec.basicsize `; + it is safe to use this larger size (e.g. with :c:func:`!memset`). + + The type *cls* **must** have been created using + negative :c:member:`PyType_Spec.basicsize`. + Python does not check this. + + On error, set an exception and return a negative value. + + .. versionadded:: 3.12 + +.. c:function:: void *PyObject_GetItemData(PyObject *o) + + Get a pointer to per-item data for a class with + :const:`Py_TPFLAGS_ITEMS_AT_END`. + + On error, set an exception and return ``NULL``. + :py:exc:`TypeError` is raised if *o* does not have + :const:`Py_TPFLAGS_ITEMS_AT_END` set. + + .. versionadded:: 3.12 diff --git a/Doc/c-api/sequence.rst b/Doc/c-api/sequence.rst index c78d273f9f149f..402a3e5e09ff56 100644 --- a/Doc/c-api/sequence.rst +++ b/Doc/c-api/sequence.rst @@ -18,7 +18,7 @@ Sequence Protocol .. c:function:: Py_ssize_t PySequence_Size(PyObject *o) Py_ssize_t PySequence_Length(PyObject *o) - .. index:: builtin: len + .. index:: pair: built-in function; len Returns the number of objects in sequence *o* on success, and ``-1`` on failure. This is equivalent to the Python expression ``len(o)``. @@ -120,7 +120,7 @@ Sequence Protocol .. c:function:: PyObject* PySequence_Tuple(PyObject *o) - .. index:: builtin: tuple + .. index:: pair: built-in function; tuple Return a tuple object with the same contents as the sequence or iterable *o*, or ``NULL`` on failure. If *o* is a tuple, a new reference will be returned, diff --git a/Doc/c-api/set.rst b/Doc/c-api/set.rst index f0d905bae8ae44..d642a5f1902e2e 100644 --- a/Doc/c-api/set.rst +++ b/Doc/c-api/set.rst @@ -9,8 +9,8 @@ Set Objects .. 
index:: - object: set - object: frozenset + pair: object; set + pair: object; frozenset This section details the public API for :class:`set` and :class:`frozenset` objects. Any functionality not listed below is best accessed using either @@ -107,7 +107,7 @@ or :class:`frozenset` or instances of their subtypes. .. c:function:: Py_ssize_t PySet_Size(PyObject *anyset) - .. index:: builtin: len + .. index:: pair: built-in function; len Return the length of a :class:`set` or :class:`frozenset` object. Equivalent to ``len(anyset)``. Raises a :exc:`PyExc_SystemError` if *anyset* is not a diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 9618a0cf676972..aae1b951804491 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -347,7 +347,7 @@ method. .. data:: METH_CLASS - .. index:: builtin: classmethod + .. index:: pair: built-in function; classmethod The method will be passed the type object as the first parameter rather than an instance of the type. This is used to create *class methods*, @@ -357,7 +357,7 @@ method. .. data:: METH_STATIC - .. index:: builtin: staticmethod + .. index:: pair: built-in function; staticmethod The method will be passed ``NULL`` as the first parameter rather than an instance of the type. This is used to create *static methods*, similar to @@ -486,6 +486,22 @@ The following flags can be used with :c:member:`PyMemberDef.flags`: Emit an ``object.__getattr__`` :ref:`audit event <audit-events>` before reading. +.. c:macro:: Py_RELATIVE_OFFSET + + Indicates that the :c:member:`~PyMemberDef.offset` of this ``PyMemberDef`` + entry indicates an offset from the subclass-specific data, rather than + from ``PyObject``. + + Can only be used as part of :c:member:`Py_tp_members <PyTypeObject.tp_members>` + :c:type:`slot <PyType_Slot>` when creating a class using negative + :c:member:`~PyType_Spec.basicsize`. + It is mandatory in that case. + + This flag is only used in :c:type:`PyType_Slot`. + When setting :c:member:`~PyTypeObject.tp_members` during + class creation, Python clears it and sets + :c:member:`PyMemberDef.offset` to the offset from the ``PyObject`` struct. + .. index:: single: READ_RESTRICTED single: WRITE_RESTRICTED diff --git a/Doc/c-api/tuple.rst b/Doc/c-api/tuple.rst index 5acddf7849aa33..ac62058676eeeb 100644 --- a/Doc/c-api/tuple.rst +++ b/Doc/c-api/tuple.rst @@ -5,7 +5,7 @@ Tuple Objects ------------- -.. index:: object: tuple +.. index:: pair: object; tuple .. c:type:: PyTupleObject diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index 69b15296993301..fb38935e003336 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -5,7 +5,7 @@ Type Objects ------------ -.. index:: object: type +.. index:: pair: object; type .. c:type:: PyTypeObject @@ -256,8 +256,13 @@ The following functions and structs are used to create The metaclass *metaclass* is used to construct the resulting type object. When *metaclass* is ``NULL``, the metaclass is derived from *bases* (or *Py_tp_base[s]* slots if *bases* is ``NULL``, see below). - Note that metaclasses that override - :c:member:`~PyTypeObject.tp_new` are not supported. + + Metaclasses that override :c:member:`~PyTypeObject.tp_new` are not + supported. + (For backwards compatibility, other ``PyType_From*`` functions allow + such metaclasses. They ignore ``tp_new``, which may result in incomplete + initialization. This is deprecated and in Python 3.14+ such metaclasses will + not be supported.) The *bases* argument can be used to specify base classes; it can either be only one class or a tuple of classes. 
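As a rough illustration of how the pieces documented in this patch fit together, here is a minimal sketch of a ``PyType_Spec`` that requests extra per-instance storage with a negative ``basicsize`` and exposes a field from it via ``Py_RELATIVE_OFFSET``. The struct, member, and module/type names are illustrative only and not part of the patch:

.. code-block:: c

   #include <Python.h>
   #include <stddef.h>   /* offsetof */

   /* Hypothetical extension-local state, reserved *in addition to* whatever
      the (possibly opaque) base class needs. */
   typedef struct {
       int counter;
   } MyTypeData;

   static PyMemberDef my_members[] = {
       /* With Py_RELATIVE_OFFSET, the offset is measured from the start of
          the subclass-specific data, not from the start of the PyObject. */
       {"counter", Py_T_INT, offsetof(MyTypeData, counter), Py_RELATIVE_OFFSET, NULL},
       {NULL}
   };

   static PyType_Slot my_slots[] = {
       {Py_tp_members, my_members},
       {0, NULL}
   };

   static PyType_Spec my_spec = {
       .name = "mymod.MyType",
       /* Negative: sizeof(MyTypeData) bytes on top of the base class's layout. */
       .basicsize = -(int)sizeof(MyTypeData),
       .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
       .slots = my_slots,
   };

The class itself would then be created with, for example, ``PyType_FromMetaclass(NULL, module, &my_spec, base)``.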
@@ -305,6 +310,11 @@ The following functions and structs are used to create The function now finds and uses a metaclass corresponding to the provided base classes. Previously, only :class:`type` instances were returned. + The :c:member:`~PyTypeObject.tp_new` of the metaclass is *ignored*, + which may result in incomplete initialization. + Creating classes whose metaclass overrides + :c:member:`~PyTypeObject.tp_new` is deprecated and in Python 3.14+ it + will no longer be allowed. .. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) @@ -317,6 +327,12 @@ The following functions and structs are used to create The function now finds and uses a metaclass corresponding to the provided base classes. Previously, only :class:`type` instances were returned. + The :c:member:`~PyTypeObject.tp_new` of the metaclass is *ignored*, + which may result in incomplete initialization. + Creating classes whose metaclass overrides + :c:member:`~PyTypeObject.tp_new` is deprecated and in Python 3.14+ it + will no longer be allowed. + .. c:function:: PyObject* PyType_FromSpec(PyType_Spec *spec) Equivalent to ``PyType_FromMetaclass(NULL, NULL, spec, NULL)``. @@ -327,29 +343,67 @@ The following functions and structs are used to create base classes provided in *Py_tp_base[s]* slots. Previously, only :class:`type` instances were returned. + The :c:member:`~PyTypeObject.tp_new` of the metaclass is *ignored*, + which may result in incomplete initialization. + Creating classes whose metaclass overrides + :c:member:`~PyTypeObject.tp_new` is deprecated and in Python 3.14+ it + will no longer be allowed. + .. c:type:: PyType_Spec Structure defining a type's behavior. - .. c:member:: const char* PyType_Spec.name + .. c:member:: const char* name Name of the type, used to set :c:member:`PyTypeObject.tp_name`. - .. c:member:: int PyType_Spec.basicsize - .. c:member:: int PyType_Spec.itemsize + .. c:member:: int basicsize + + If positive, specifies the size of the instance in bytes. + It is used to set :c:member:`PyTypeObject.tp_basicsize`. + + If zero, specifies that :c:member:`~PyTypeObject.tp_basicsize` + should be inherited. + + If negative, the absolute value specifies how much space instances of the + class need *in addition* to the superclass. + Use :c:func:`PyObject_GetTypeData` to get a pointer to subclass-specific + memory reserved this way. + + .. versionchanged:: 3.12 + + Previously, this field could not be negative. + + .. c:member:: int itemsize + + Size of one element of a variable-size type, in bytes. + Used to set :c:member:`PyTypeObject.tp_itemsize`. + See ``tp_itemsize`` documentation for caveats. + + If zero, :c:member:`~PyTypeObject.tp_itemsize` is inherited. + Extending arbitrary variable-sized classes is dangerous, + since some types use a fixed offset for variable-sized memory, + which can then overlap fixed-sized memory used by a subclass. + To help prevent mistakes, inheriting ``itemsize`` is only possible + in the following situations: - Size of the instance in bytes, used to set - :c:member:`PyTypeObject.tp_basicsize` and - :c:member:`PyTypeObject.tp_itemsize`. + - The base is not variable-sized (its + :c:member:`~PyTypeObject.tp_itemsize` is zero). + - The requested :c:member:`PyType_Spec.basicsize` is positive, + suggesting that the memory layout of the base class is known. + - The requested :c:member:`PyType_Spec.basicsize` is zero, + suggesting that the subclass does not access the instance's memory + directly. + - With the :const:`Py_TPFLAGS_ITEMS_AT_END` flag. - .. 
c:member:: int PyType_Spec.flags + .. c:member:: unsigned int flags Type flags, used to set :c:member:`PyTypeObject.tp_flags`. If the ``Py_TPFLAGS_HEAPTYPE`` flag is not set, :c:func:`PyType_FromSpecWithBases` sets it automatically. - .. c:member:: PyType_Slot *PyType_Spec.slots + .. c:member:: PyType_Slot *slots Array of :c:type:`PyType_Slot` structures. Terminated by the special slot value ``{0, NULL}``. diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index e963b90628aa49..0584989233de3f 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -805,7 +805,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. c:member:: reprfunc PyTypeObject.tp_repr - .. index:: builtin: repr + .. index:: pair: built-in function; repr An optional pointer to a function that implements the built-in function :func:`repr`. @@ -870,7 +870,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. c:member:: hashfunc PyTypeObject.tp_hash - .. index:: builtin: hash + .. index:: pair: built-in function; hash An optional pointer to a function that implements the built-in function :func:`hash`. @@ -1171,6 +1171,26 @@ and :c:type:`PyType_Type` effectively act as defaults.) :c:member:`~PyTypeObject.tp_weaklistoffset` field is set in a superclass. + .. data:: Py_TPFLAGS_ITEMS_AT_END + + Only usable with variable-size types, i.e. ones with non-zero + :c:member:`~PyTypeObject.tp_itemsize`. + + Indicates that the variable-sized portion of an instance of this type is + at the end of the instance's memory area, at an offset of + :c:expr:`Py_TYPE(obj)->tp_basicsize` (which may be different in each + subclass). + + When setting this flag, be sure that all superclasses either + use this memory layout, or are not variable-sized. + Python does not check this. + + .. versionadded:: 3.12 + + **Inheritance:** + + This flag is inherited. + .. XXX Document more flags here? 
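Continuing the same hypothetical sketch, code in the class's methods can reach the reserved region at runtime through the new accessors; in real code the ``defining_class`` argument would typically come from the ``METH_METHOD | METH_FASTCALL | METH_KEYWORDS`` calling convention:

.. code-block:: c

   /* Illustrative helper only; MyTypeData is the hypothetical struct from
      the earlier sketch.  Assumes the usual #include <Python.h>. */
   static int
   my_reset_counter(PyObject *self, PyTypeObject *defining_class)
   {
       MyTypeData *data = (MyTypeData *)PyObject_GetTypeData(self, defining_class);
       if (data == NULL) {
           return -1;              /* exception already set */
       }
       /* PyType_GetTypeDataSize() may report more than sizeof(MyTypeData);
          the whole reported region belongs to this class. */
       Py_ssize_t size = PyType_GetTypeDataSize(defining_class);
       if (size < 0) {
           return -1;
       }
       memset(data, 0, (size_t)size);
       data->counter = 1;
       return 0;
   }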
diff --git a/Doc/conf.py b/Doc/conf.py index cef2a0e2837f6a..485c0bdf84df2e 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -91,6 +91,11 @@ # Avoid a warning with Sphinx >= 2.0 master_doc = 'contents' +# Allow translation of index directives +gettext_additional_targets = [ + 'index', +] + # Options for HTML output # ----------------------- @@ -264,11 +269,29 @@ linkcheck_allowed_redirects = { # bpo-NNNN -> BPO -> GH Issues - r'https://bugs.python.org/issue\?@action=redirect&bpo=\d+': 'https://github.com/python/cpython/issues/\d+', + r'https://bugs.python.org/issue\?@action=redirect&bpo=\d+': r'https://github.com/python/cpython/issues/\d+', # GH-NNNN used to refer to pull requests - r'https://github.com/python/cpython/issues/\d+': 'https://github.com/python/cpython/pull/\d+', + r'https://github.com/python/cpython/issues/\d+': r'https://github.com/python/cpython/pull/\d+', # :source:`something` linking files in the repository - r'https://github.com/python/cpython/tree/.*': 'https://github.com/python/cpython/blob/.*' + r'https://github.com/python/cpython/tree/.*': 'https://github.com/python/cpython/blob/.*', + # Intentional HTTP use at Misc/NEWS.d/3.5.0a1.rst + r'http://www.python.org/$': 'https://www.python.org/$', + # Used in license page, keep as is + r'https://www.zope.org/': r'https://www.zope.dev/', + # Microsoft's redirects to learn.microsoft.com + r'https://msdn.microsoft.com/.*': 'https://learn.microsoft.com/.*', + r'https://docs.microsoft.com/.*': 'https://learn.microsoft.com/.*', + r'https://go.microsoft.com/fwlink/\?LinkID=\d+': 'https://learn.microsoft.com/.*', + # Language redirects + r'https://toml.io': 'https://toml.io/en/', + r'https://www.redhat.com': 'https://www.redhat.com/en', + # Other redirects + r'https://www.boost.org/libs/.+': r'https://www.boost.org/doc/libs/\d_\d+_\d/.+', + r'https://support.microsoft.com/en-us/help/\d+': 'https://support.microsoft.com/en-us/topic/.+', + r'https://perf.wiki.kernel.org$': 'https://perf.wiki.kernel.org/index.php/Main_Page', + r'https://www.sqlite.org': 'https://www.sqlite.org/index.html', + r'https://mitpress.mit.edu/sicp$': 'https://mitpress.mit.edu/9780262510875/structure-and-interpretation-of-computer-programs/', + r'https://www.python.org/psf/': 'https://www.python.org/psf-landing/', } linkcheck_anchors_ignore = [ diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat index 4cc06d22baaa93..f112d268129fd1 100644 --- a/Doc/data/stable_abi.dat +++ b/Doc/data/stable_abi.dat @@ -521,6 +521,7 @@ function,PyObject_GetAttrString,3.2,, function,PyObject_GetBuffer,3.11,, function,PyObject_GetItem,3.2,, function,PyObject_GetIter,3.2,, +function,PyObject_GetTypeData,3.12,, function,PyObject_HasAttr,3.2,, function,PyObject_HasAttrString,3.2,, function,PyObject_Hash,3.2,, @@ -675,6 +676,7 @@ function,PyType_GetModuleState,3.10,, function,PyType_GetName,3.11,, function,PyType_GetQualName,3.11,, function,PyType_GetSlot,3.4,, +function,PyType_GetTypeDataSize,3.12,, function,PyType_IsSubtype,3.2,, function,PyType_Modified,3.2,, function,PyType_Ready,3.2,, diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index 56b40acdb69fed..6852a385f0c63c 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -149,7 +149,7 @@ done. This can be done using the :c:func:`PyErr_Fetch` and .. 
index:: single: string; object representation - builtin: repr + pair: built-in function; repr Object Presentation ------------------- diff --git a/Doc/faq/extending.rst b/Doc/faq/extending.rst index 07282639e4f9b4..bc3080f60ee237 100644 --- a/Doc/faq/extending.rst +++ b/Doc/faq/extending.rst @@ -42,7 +42,7 @@ on what you're trying to do. .. XXX make sure these all work `Cython `_ and its relative `Pyrex -`_ are compilers +`_ are compilers that accept a slightly modified form of Python and generate the corresponding C code. Cython and Pyrex make it possible to write an extension without having to learn Python's C API. diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index 6256deb5797c89..a9b2622e02ef3b 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -54,8 +54,8 @@ commercial use, to sell copies of Python in source or binary form (modified or unmodified), or to sell products that incorporate Python in some form. We would still like to know about all commercial use of Python, of course. -See `the PSF license page `_ to find further -explanations and a link to the full text of the license. +See `the license page `_ to find further +explanations and the full text of the PSF License. The Python logo is trademarked, and in certain cases permission is required to use it. Consult `the Trademark Usage Policy @@ -215,7 +215,7 @@ every day, and Usenet readers are often more able to cope with this volume. Announcements of new software releases and events can be found in comp.lang.python.announce, a low-traffic moderated list that receives about five postings per day. It's available as `the python-announce mailing list -`_. +`_. More info about other mailing lists and newsgroups can be found at https://www.python.org/community/lists/. @@ -352,7 +352,7 @@ titled "Python X.Y Release Schedule", where X.Y is a version that hasn't been publicly released yet. New development is discussed on `the python-dev mailing list -`_. +`_. Is it reasonable to propose incompatible changes to Python? diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst index 38f9b171618b26..ab5618db84f77e 100644 --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -61,7 +61,7 @@ Yes. `Pyflakes `_ do basic checking that will help you catch bugs sooner. -Static type checkers such as `Mypy `_, +Static type checkers such as `Mypy `_, `Pyre `_, and `Pytype `_ can check type hints in Python source code. diff --git a/Doc/howto/clinic.rst b/Doc/howto/clinic.rst index 8a10fe327358c0..6ebc2d9b0a71a9 100644 --- a/Doc/howto/clinic.rst +++ b/Doc/howto/clinic.rst @@ -1033,19 +1033,36 @@ you're not permitted to use: Using a return converter ------------------------ -By default the impl function Argument Clinic generates for you returns ``PyObject *``. -But your C function often computes some C type, then converts it into the ``PyObject *`` +By default, the impl function Argument Clinic generates for you returns +:c:type:`PyObject * `. +But your C function often computes some C type, +then converts it into the :c:type:`!PyObject *` at the last moment. Argument Clinic handles converting your inputs from Python types into native C types—why not have it convert your return value from a native C type into a Python type too? That's what a "return converter" does. It changes your impl function to return some C type, then adds code to the generated (non-impl) function to handle converting -that value into the appropriate ``PyObject *``. +that value into the appropriate :c:type:`!PyObject *`. 
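To make the effect concrete before the syntax is described: with an ``int`` return converter (see the ``add -> int`` example just below), the impl you write returns a plain C ``int`` instead of a ``PyObject *``. A hedged sketch with illustrative names only; the real generated names depend on the module and function:

.. code-block:: c

   /* Sketch of the hand-written impl for a hypothetical module-level "add"
      function declared with the int return converter.  The generated
      (non-impl) wrapper converts the int to a Python object and treats -1
      as the error indicator, so set an exception before returning -1. */
   static int
   add_impl(PyObject *module, int a, int b)
   {
       return a + b;
   }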
The syntax for return converters is similar to that of parameter converters. You specify the return converter like it was a return annotation on the -function itself. Return converters behave much the same as parameter converters; +function itself, using ``->`` notation. + +For example: + +.. code-block:: c + + /*[clinic input] + add -> int + + a: int + b: int + / + + [clinic start generated code]*/ + +Return converters behave much the same as parameter converters; they take arguments, the arguments are all keyword-only, and if you're not changing any of the default arguments you can omit the parentheses. @@ -1066,19 +1083,17 @@ Currently Argument Clinic supports only a few return converters: .. code-block:: none bool + double + float int - unsigned int long - unsigned int - size_t Py_ssize_t - float - double - DecodeFSDefault + size_t + unsigned int + unsigned long -None of these take parameters. For the first three, return -1 to indicate -error. For ``DecodeFSDefault``, the return type is ``const char *``; return a ``NULL`` -pointer to indicate an error. +None of these take parameters. +For all of these, return ``-1`` to indicate error. To see all the return converters Argument Clinic supports, along with their parameters (if any), diff --git a/Doc/howto/curses.rst b/Doc/howto/curses.rst index 83d80471ffc8ee..a3068d86d85bc4 100644 --- a/Doc/howto/curses.rst +++ b/Doc/howto/curses.rst @@ -4,6 +4,8 @@ Curses Programming with Python ********************************** +.. currentmodule:: curses + :Author: A.M. Kuchling, Eric S. Raymond :Release: 2.04 @@ -65,7 +67,7 @@ The Python module is a fairly simple wrapper over the C functions provided by curses; if you're already familiar with curses programming in C, it's really easy to transfer that knowledge to Python. The biggest difference is that the Python interface makes things simpler by merging different C functions such as -:c:func:`addstr`, :c:func:`mvaddstr`, and :c:func:`mvwaddstr` into a single +:c:func:`!addstr`, :c:func:`!mvaddstr`, and :c:func:`!mvwaddstr` into a single :meth:`~curses.window.addstr` method. You'll see this covered in more detail later. @@ -82,7 +84,7 @@ Before doing anything, curses must be initialized. This is done by calling the :func:`~curses.initscr` function, which will determine the terminal type, send any required setup codes to the terminal, and create various internal data structures. If successful, -:func:`initscr` returns a window object representing the entire +:func:`!initscr` returns a window object representing the entire screen; this is usually called ``stdscr`` after the name of the corresponding C variable. :: @@ -151,8 +153,8 @@ importing the :func:`curses.wrapper` function and using it like this:: The :func:`~curses.wrapper` function takes a callable object and does the initializations described above, also initializing colors if color -support is present. :func:`wrapper` then runs your provided callable. -Once the callable returns, :func:`wrapper` will restore the original +support is present. :func:`!wrapper` then runs your provided callable. +Once the callable returns, :func:`!wrapper` will restore the original state of the terminal. The callable is called inside a :keyword:`try`...\ :keyword:`except` that catches exceptions, restores the state of the terminal, and then re-raises the exception. 
Therefore @@ -200,7 +202,7 @@ This is because curses was originally written with slow 300-baud terminal connections in mind; with these terminals, minimizing the time required to redraw the screen was very important. Instead curses accumulates changes to the screen and displays them in the most -efficient manner when you call :meth:`refresh`. For example, if your +efficient manner when you call :meth:`!refresh`. For example, if your program displays some text in a window and then clears the window, there's no need to send the original text because they're never visible. @@ -210,7 +212,7 @@ really complicate programming with curses much. Most programs go into a flurry of activity, and then pause waiting for a keypress or some other action on the part of the user. All you have to do is to be sure that the screen has been redrawn before pausing to wait for user input, by first calling -``stdscr.refresh()`` or the :meth:`refresh` method of some other relevant +:meth:`!stdscr.refresh` or the :meth:`!refresh` method of some other relevant window. A pad is a special case of a window; it can be larger than the actual display @@ -234,7 +236,7 @@ displayed. :: # : filled with pad content. pad.refresh( 0,0, 5,5, 20,75) -The :meth:`refresh` call displays a section of the pad in the rectangle +The :meth:`!refresh` call displays a section of the pad in the rectangle extending from coordinate (5,5) to coordinate (20,75) on the screen; the upper left corner of the displayed section is coordinate (0,0) on the pad. Beyond that difference, pads are exactly like ordinary windows and support the same @@ -242,7 +244,7 @@ methods. If you have multiple windows and pads on screen there is a more efficient way to update the screen and prevent annoying screen flicker -as each part of the screen gets updated. :meth:`refresh` actually +as each part of the screen gets updated. :meth:`!refresh` actually does two things: 1) Calls the :meth:`~curses.window.noutrefresh` method of each window @@ -251,8 +253,8 @@ does two things: 2) Calls the function :func:`~curses.doupdate` function to change the physical screen to match the desired state recorded in the data structure. -Instead you can call :meth:`noutrefresh` on a number of windows to -update the data structure, and then call :func:`doupdate` to update +Instead you can call :meth:`!noutrefresh` on a number of windows to +update the data structure, and then call :func:`!doupdate` to update the screen. @@ -261,11 +263,11 @@ Displaying Text From a C programmer's point of view, curses may sometimes look like a twisty maze of functions, all subtly different. For example, -:c:func:`addstr` displays a string at the current cursor location in -the ``stdscr`` window, while :c:func:`mvaddstr` moves to a given y,x -coordinate first before displaying the string. :c:func:`waddstr` is just -like :c:func:`addstr`, but allows specifying a window to use instead of -using ``stdscr`` by default. :c:func:`mvwaddstr` allows specifying both +:c:func:`!addstr` displays a string at the current cursor location in +the ``stdscr`` window, while :c:func:`!mvaddstr` moves to a given y,x +coordinate first before displaying the string. :c:func:`!waddstr` is just +like :c:func:`!addstr`, but allows specifying a window to use instead of +using ``stdscr`` by default. :c:func:`!mvwaddstr` allows specifying both a window and a coordinate. Fortunately the Python interface hides all these details. ``stdscr`` @@ -298,7 +300,7 @@ the next subsection. 
The :meth:`~curses.window.addstr` method takes a Python string or bytestring as the value to be displayed. The contents of bytestrings are sent to the terminal as-is. Strings are encoded to bytes using -the value of the window's :attr:`encoding` attribute; this defaults to +the value of the window's :attr:`~window.encoding` attribute; this defaults to the default system encoding as returned by :func:`locale.getencoding`. The :meth:`~curses.window.addch` methods take a character, which can be @@ -444,15 +446,15 @@ There are two methods for getting input from a window: It's possible to not wait for the user using the :meth:`~curses.window.nodelay` window method. After ``nodelay(True)``, -:meth:`getch` and :meth:`getkey` for the window become -non-blocking. To signal that no input is ready, :meth:`getch` returns -``curses.ERR`` (a value of -1) and :meth:`getkey` raises an exception. +:meth:`!getch` and :meth:`!getkey` for the window become +non-blocking. To signal that no input is ready, :meth:`!getch` returns +``curses.ERR`` (a value of -1) and :meth:`!getkey` raises an exception. There's also a :func:`~curses.halfdelay` function, which can be used to (in -effect) set a timer on each :meth:`getch`; if no input becomes +effect) set a timer on each :meth:`!getch`; if no input becomes available within a specified delay (measured in tenths of a second), curses raises an exception. -The :meth:`getch` method returns an integer; if it's between 0 and 255, it +The :meth:`!getch` method returns an integer; if it's between 0 and 255, it represents the ASCII code of the key pressed. Values greater than 255 are special keys such as Page Up, Home, or the cursor keys. You can compare the value returned to constants such as :const:`curses.KEY_PPAGE`, diff --git a/Doc/howto/pyporting.rst b/Doc/howto/pyporting.rst index add1c11be534e3..baea3e85c3b84b 100644 --- a/Doc/howto/pyporting.rst +++ b/Doc/howto/pyporting.rst @@ -438,7 +438,7 @@ to make sure everything functions as expected in both versions of Python. .. _Futurize: https://python-future.org/automatic_conversion.html .. _importlib2: https://pypi.org/project/importlib2 .. _Modernize: https://python-modernize.readthedocs.io/ -.. _mypy: http://mypy-lang.org/ +.. _mypy: https://mypy-lang.org/ .. _Porting to Python 3: http://python3porting.com/ .. _Pylint: https://pypi.org/project/pylint diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst index 122692a428594f..ba9314e46ab6ea 100644 --- a/Doc/library/_thread.rst +++ b/Doc/library/_thread.rst @@ -208,7 +208,7 @@ In addition to these methods, lock objects can also be used via the **Caveats:** - .. index:: module: signal + .. index:: pair: module; signal * Threads interact strangely with interrupts: the :exc:`KeyboardInterrupt` exception will be received by an arbitrary thread. (When the :mod:`signal` diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index e982cc166a3f2d..8d0022cc66daac 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -529,7 +529,7 @@ Opening network connections specifies requirements for algorithms that reduce this user-visible delay and provides an algorithm. - For more information: https://tools.ietf.org/html/rfc6555 + For more information: https://datatracker.ietf.org/doc/html/rfc6555 .. 
versionchanged:: 3.11 diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index ba0f909c405a34..fe8d028150403d 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -527,6 +527,51 @@ Running Tasks Concurrently and there is no running event loop. +.. _eager-task-factory: + +Eager Task Factory +================== + +.. function:: eager_task_factory(loop, coro, *, name=None, context=None) + + A task factory for eager task execution. + + When using this factory (via :meth:`loop.set_task_factory(asyncio.eager_task_factory) `), + coroutines begin execution synchronously during :class:`Task` construction. + Tasks are only scheduled on the event loop if they block. + This can be a performance improvement as the overhead of loop scheduling + is avoided for coroutines that complete synchronously. + + A common example where this is beneficial is coroutines which employ + caching or memoization to avoid actual I/O when possible. + + .. note:: + + Immediate execution of the coroutine is a semantic change. + If the coroutine returns or raises, the task is never scheduled + to the event loop. If the coroutine execution blocks, the task is + scheduled to the event loop. This change may introduce behavior + changes to existing applications. For example, + the application's task execution order is likely to change. + + .. versionadded:: 3.12 + +.. function:: create_eager_task_factory(custom_task_constructor) + + Create an eager task factory, similar to :func:`eager_task_factory`, + using the provided *custom_task_constructor* when creating a new task instead + of the default :class:`Task`. + + *custom_task_constructor* must be a *callable* with the signature matching + the signature of :class:`Task.__init__ `. + The callable must return a :class:`asyncio.Task`-compatible object. + + This function returns a *callable* intended to be used as a task factory of an + event loop via :meth:`loop.set_task_factory(factory) `). + + .. versionadded:: 3.12 + + Shielding From Cancellation =========================== @@ -978,7 +1023,7 @@ Introspection Task Object =========== -.. class:: Task(coro, *, loop=None, name=None) +.. class:: Task(coro, *, loop=None, name=None, context=None, eager_start=False) A :class:`Future-like ` object that runs a Python :ref:`coroutine `. Not thread-safe. @@ -1013,9 +1058,17 @@ Task Object APIs except :meth:`Future.set_result` and :meth:`Future.set_exception`. - Tasks support the :mod:`contextvars` module. When a Task - is created it copies the current context and later runs its - coroutine in the copied context. + An optional keyword-only *context* argument allows specifying a + custom :class:`contextvars.Context` for the *coro* to run in. + If no *context* is provided, the Task copies the current context + and later runs its coroutine in the copied context. + + An optional keyword-only *eager_start* argument allows eagerly starting + the execution of the :class:`asyncio.Task` at task creation time. + If set to ``True`` and the event loop is running, the task will start + executing the coroutine immediately, until the first time the coroutine + blocks. If the coroutine returns or raises without blocking, the task + will be finished eagerly and will skip scheduling to the event loop. .. versionchanged:: 3.7 Added support for the :mod:`contextvars` module. @@ -1027,6 +1080,12 @@ Task Object Deprecation warning is emitted if *loop* is not specified and there is no running event loop. + .. versionchanged:: 3.11 + Added the *context* parameter. 
+ + .. versionchanged:: 3.12 + Added the *eager_start* parameter. + .. method:: done() Return ``True`` if the Task is *done*. @@ -1117,8 +1176,17 @@ Task Object Return the coroutine object wrapped by the :class:`Task`. + .. note:: + + This will return ``None`` for Tasks which have already + completed eagerly. See the :ref:`Eager Task Factory `. + .. versionadded:: 3.8 + .. versionchanged:: 3.12 + + Newly added eager task execution means result may be ``None``. + .. method:: get_context() Return the :class:`contextvars.Context` object diff --git a/Doc/library/binascii.rst b/Doc/library/binascii.rst index 5a0815faa38eac..21960cb7972e6e 100644 --- a/Doc/library/binascii.rst +++ b/Doc/library/binascii.rst @@ -6,8 +6,8 @@ representations. .. index:: - module: uu - module: base64 + pair: module; uu + pair: module; base64 -------------- diff --git a/Doc/library/bisect.rst b/Doc/library/bisect.rst index e3c8c801904b61..8022c596f0af97 100644 --- a/Doc/library/bisect.rst +++ b/Doc/library/bisect.rst @@ -24,6 +24,8 @@ method to determine whether a value has been found. Instead, the functions only call the :meth:`__lt__` method and will return an insertion point between values in an array. +.. _bisect functions: + The following functions are provided: @@ -55,7 +57,7 @@ The following functions are provided: .. function:: bisect_right(a, x, lo=0, hi=len(a), *, key=None) bisect(a, x, lo=0, hi=len(a), *, key=None) - Similar to :func:`bisect_left`, but returns an insertion point which comes + Similar to :py:func:`~bisect.bisect_left`, but returns an insertion point which comes after (to the right of) any existing entries of *x* in *a*. The returned insertion point *ip* partitions the array *a* into two slices @@ -70,7 +72,7 @@ The following functions are provided: Insert *x* in *a* in sorted order. - This function first runs :func:`bisect_left` to locate an insertion point. + This function first runs :py:func:`~bisect.bisect_left` to locate an insertion point. Next, it runs the :meth:`insert` method on *a* to insert *x* at the appropriate position to maintain sort order. @@ -87,10 +89,10 @@ The following functions are provided: .. function:: insort_right(a, x, lo=0, hi=len(a), *, key=None) insort(a, x, lo=0, hi=len(a), *, key=None) - Similar to :func:`insort_left`, but inserting *x* in *a* after any existing + Similar to :py:func:`~bisect.insort_left`, but inserting *x* in *a* after any existing entries of *x*. - This function first runs :func:`bisect_right` to locate an insertion point. + This function first runs :py:func:`~bisect.bisect_right` to locate an insertion point. Next, it runs the :meth:`insert` method on *a* to insert *x* at the appropriate position to maintain sort order. @@ -120,7 +122,7 @@ thoughts in mind: they are used. Consequently, if the search functions are used in a loop, the key function may be called again and again on the same array elements. If the key function isn't fast, consider wrapping it with - :func:`functools.cache` to avoid duplicate computations. Alternatively, + :py:func:`functools.cache` to avoid duplicate computations. Alternatively, consider searching an array of precomputed keys to locate the insertion point (as shown in the examples section below). @@ -140,7 +142,7 @@ thoughts in mind: Searching Sorted Lists ---------------------- -The above :func:`bisect` functions are useful for finding insertion points but +The above `bisect functions`_ are useful for finding insertion points but can be tricky or awkward to use for common searching tasks. 
The following five functions show how to transform them into the standard lookups for sorted lists:: @@ -186,8 +188,8 @@ Examples .. _bisect-example: -The :func:`bisect` function can be useful for numeric table lookups. This -example uses :func:`bisect` to look up a letter grade for an exam score (say) +The :py:func:`~bisect.bisect` function can be useful for numeric table lookups. This +example uses :py:func:`~bisect.bisect` to look up a letter grade for an exam score (say) based on a set of ordered numeric breakpoints: 90 and up is an 'A', 80 to 89 is a 'B', and so on:: @@ -198,8 +200,8 @@ a 'B', and so on:: >>> [grade(score) for score in [33, 99, 77, 70, 89, 90, 100]] ['F', 'A', 'C', 'C', 'B', 'A', 'A'] -The :func:`bisect` and :func:`insort` functions also work with lists of -tuples. The *key* argument can serve to extract the field used for ordering +The :py:func:`~bisect.bisect` and :py:func:`~bisect.insort` functions also work with +lists of tuples. The *key* argument can serve to extract the field used for ordering records in a table:: >>> from collections import namedtuple diff --git a/Doc/library/cmath.rst b/Doc/library/cmath.rst index 5ed7a09b3e9db2..b17d58e1cc0ce1 100644 --- a/Doc/library/cmath.rst +++ b/Doc/library/cmath.rst @@ -301,7 +301,7 @@ Constants .. versionadded:: 3.6 -.. index:: module: math +.. index:: pair: module; math Note that the selection of functions is similar, but not identical, to that in module :mod:`math`. The reason for having two modules is that some users aren't diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst index 1ada0d352a0cc6..43a3286ba832cf 100644 --- a/Doc/library/collections.abc.rst +++ b/Doc/library/collections.abc.rst @@ -177,6 +177,7 @@ ABC Inherits from Abstract Methods Mi :class:`AsyncIterable` [1]_ ``__aiter__`` :class:`AsyncIterator` [1]_ :class:`AsyncIterable` ``__anext__`` ``__aiter__`` :class:`AsyncGenerator` [1]_ :class:`AsyncIterator` ``asend``, ``athrow`` ``aclose``, ``__aiter__``, ``__anext__`` +:class:`Buffer` [1]_ ``__buffer__`` ============================== ====================== ======================= ==================================================== @@ -272,6 +273,12 @@ Collections Abstract Base Classes -- Detailed Descriptions The index() method added support for *stop* and *start* arguments. + .. deprecated-removed:: 3.12 3.14 + The :class:`ByteString` ABC has been deprecated. + For use in typing, prefer a union, like ``bytes | bytearray``, or + :class:`collections.abc.Buffer`. + For use as an ABC, prefer :class:`Sequence` or :class:`collections.abc.Buffer`. + .. class:: Set MutableSet @@ -346,6 +353,13 @@ Collections Abstract Base Classes -- Detailed Descriptions .. versionadded:: 3.6 +.. class:: Buffer + + ABC for classes that provide the :meth:`~object.__buffer__` method, + implementing the :ref:`buffer protocol `. See :pep:`688`. + + .. versionadded:: 3.12 + Examples and Recipes -------------------- diff --git a/Doc/library/copy.rst b/Doc/library/copy.rst index a8bc2fa55ea8c3..8f32477ed508c3 100644 --- a/Doc/library/copy.rst +++ b/Doc/library/copy.rst @@ -68,7 +68,7 @@ Shallow copies of dictionaries can be made using :meth:`dict.copy`, and of lists by assigning a slice of the entire list, for example, ``copied_list = original_list[:]``. -.. index:: module: pickle +.. index:: pair: module; pickle Classes can use the same interfaces to control copying that they use to control pickling. 
See the description of module :mod:`pickle` for information on these diff --git a/Doc/library/copyreg.rst b/Doc/library/copyreg.rst index 2107215c0c1967..2a28c043f80723 100644 --- a/Doc/library/copyreg.rst +++ b/Doc/library/copyreg.rst @@ -7,8 +7,8 @@ **Source code:** :source:`Lib/copyreg.py` .. index:: - module: pickle - module: copy + pair: module; pickle + pair: module; copy -------------- @@ -29,7 +29,7 @@ Such constructors may be factory functions or class instances. Declares that *function* should be used as a "reduction" function for objects of type *type*. *function* must return either a string or a tuple - containing two or five elements. See the :attr:`~pickle.Pickler.dispatch_table` + containing between two and six elements. See the :attr:`~pickle.Pickler.dispatch_table` for more details on the interface of *function*. The *constructor_ob* parameter is a legacy feature and is now ignored, but if diff --git a/Doc/library/curses.ascii.rst b/Doc/library/curses.ascii.rst index e1d1171927c9e2..410b76e77c025b 100644 --- a/Doc/library/curses.ascii.rst +++ b/Doc/library/curses.ascii.rst @@ -15,81 +15,81 @@ The :mod:`curses.ascii` module supplies name constants for ASCII characters and functions to test membership in various ASCII character classes. The constants supplied are names for control characters as follows: -+--------------+----------------------------------------------+ -| Name | Meaning | -+==============+==============================================+ -| :const:`NUL` | | -+--------------+----------------------------------------------+ -| :const:`SOH` | Start of heading, console interrupt | -+--------------+----------------------------------------------+ -| :const:`STX` | Start of text | -+--------------+----------------------------------------------+ -| :const:`ETX` | End of text | -+--------------+----------------------------------------------+ -| :const:`EOT` | End of transmission | -+--------------+----------------------------------------------+ -| :const:`ENQ` | Enquiry, goes with :const:`ACK` flow control | -+--------------+----------------------------------------------+ -| :const:`ACK` | Acknowledgement | -+--------------+----------------------------------------------+ -| :const:`BEL` | Bell | -+--------------+----------------------------------------------+ -| :const:`BS` | Backspace | -+--------------+----------------------------------------------+ -| :const:`TAB` | Tab | -+--------------+----------------------------------------------+ -| :const:`HT` | Alias for :const:`TAB`: "Horizontal tab" | -+--------------+----------------------------------------------+ -| :const:`LF` | Line feed | -+--------------+----------------------------------------------+ -| :const:`NL` | Alias for :const:`LF`: "New line" | -+--------------+----------------------------------------------+ -| :const:`VT` | Vertical tab | -+--------------+----------------------------------------------+ -| :const:`FF` | Form feed | -+--------------+----------------------------------------------+ -| :const:`CR` | Carriage return | -+--------------+----------------------------------------------+ -| :const:`SO` | Shift-out, begin alternate character set | -+--------------+----------------------------------------------+ -| :const:`SI` | Shift-in, resume default character set | -+--------------+----------------------------------------------+ -| :const:`DLE` | Data-link escape | -+--------------+----------------------------------------------+ -| :const:`DC1` | XON, for flow control | 
-+--------------+----------------------------------------------+ -| :const:`DC2` | Device control 2, block-mode flow control | -+--------------+----------------------------------------------+ -| :const:`DC3` | XOFF, for flow control | -+--------------+----------------------------------------------+ -| :const:`DC4` | Device control 4 | -+--------------+----------------------------------------------+ -| :const:`NAK` | Negative acknowledgement | -+--------------+----------------------------------------------+ -| :const:`SYN` | Synchronous idle | -+--------------+----------------------------------------------+ -| :const:`ETB` | End transmission block | -+--------------+----------------------------------------------+ -| :const:`CAN` | Cancel | -+--------------+----------------------------------------------+ -| :const:`EM` | End of medium | -+--------------+----------------------------------------------+ -| :const:`SUB` | Substitute | -+--------------+----------------------------------------------+ -| :const:`ESC` | Escape | -+--------------+----------------------------------------------+ -| :const:`FS` | File separator | -+--------------+----------------------------------------------+ -| :const:`GS` | Group separator | -+--------------+----------------------------------------------+ -| :const:`RS` | Record separator, block-mode terminator | -+--------------+----------------------------------------------+ -| :const:`US` | Unit separator | -+--------------+----------------------------------------------+ -| :const:`SP` | Space | -+--------------+----------------------------------------------+ -| :const:`DEL` | Delete | -+--------------+----------------------------------------------+ ++---------------+----------------------------------------------+ +| Name | Meaning | ++===============+==============================================+ +| .. data:: NUL | | ++---------------+----------------------------------------------+ +| .. data:: SOH | Start of heading, console interrupt | ++---------------+----------------------------------------------+ +| .. data:: STX | Start of text | ++---------------+----------------------------------------------+ +| .. data:: ETX | End of text | ++---------------+----------------------------------------------+ +| .. data:: EOT | End of transmission | ++---------------+----------------------------------------------+ +| .. data:: ENQ | Enquiry, goes with :const:`ACK` flow control | ++---------------+----------------------------------------------+ +| .. data:: ACK | Acknowledgement | ++---------------+----------------------------------------------+ +| .. data:: BEL | Bell | ++---------------+----------------------------------------------+ +| .. data:: BS | Backspace | ++---------------+----------------------------------------------+ +| .. data:: TAB | Tab | ++---------------+----------------------------------------------+ +| .. data:: HT | Alias for :const:`TAB`: "Horizontal tab" | ++---------------+----------------------------------------------+ +| .. data:: LF | Line feed | ++---------------+----------------------------------------------+ +| .. data:: NL | Alias for :const:`LF`: "New line" | ++---------------+----------------------------------------------+ +| .. data:: VT | Vertical tab | ++---------------+----------------------------------------------+ +| .. data:: FF | Form feed | ++---------------+----------------------------------------------+ +| .. data:: CR | Carriage return | ++---------------+----------------------------------------------+ +| .. 
data:: SO | Shift-out, begin alternate character set | ++---------------+----------------------------------------------+ +| .. data:: SI | Shift-in, resume default character set | ++---------------+----------------------------------------------+ +| .. data:: DLE | Data-link escape | ++---------------+----------------------------------------------+ +| .. data:: DC1 | XON, for flow control | ++---------------+----------------------------------------------+ +| .. data:: DC2 | Device control 2, block-mode flow control | ++---------------+----------------------------------------------+ +| .. data:: DC3 | XOFF, for flow control | ++---------------+----------------------------------------------+ +| .. data:: DC4 | Device control 4 | ++---------------+----------------------------------------------+ +| .. data:: NAK | Negative acknowledgement | ++---------------+----------------------------------------------+ +| .. data:: SYN | Synchronous idle | ++---------------+----------------------------------------------+ +| .. data:: ETB | End transmission block | ++---------------+----------------------------------------------+ +| .. data:: CAN | Cancel | ++---------------+----------------------------------------------+ +| .. data:: EM | End of medium | ++---------------+----------------------------------------------+ +| .. data:: SUB | Substitute | ++---------------+----------------------------------------------+ +| .. data:: ESC | Escape | ++---------------+----------------------------------------------+ +| .. data:: FS | File separator | ++---------------+----------------------------------------------+ +| .. data:: GS | Group separator | ++---------------+----------------------------------------------+ +| .. data:: RS | Record separator, block-mode terminator | ++---------------+----------------------------------------------+ +| .. data:: US | Unit separator | ++---------------+----------------------------------------------+ +| .. data:: SP | Space | ++---------------+----------------------------------------------+ +| .. data:: DEL | Delete | ++---------------+----------------------------------------------+ Note that many of these have little practical significance in modern usage. The mnemonics derive from teleprinter conventions that predate digital computers. diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index f50b51c3780ef0..cf208f3ba0db36 100644 --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -107,7 +107,7 @@ The module :mod:`curses` defines the following functions: Return the attribute value for displaying text in the specified color pair. Only the first 256 color pairs are supported. This attribute value can be combined with :const:`A_STANDOUT`, :const:`A_REVERSE`, - and the other :const:`A_\*` attributes. :func:`pair_number` is the counterpart + and the other :const:`!A_\*` attributes. :func:`pair_number` is the counterpart to this function. @@ -223,7 +223,7 @@ The module :mod:`curses` defines the following functions: .. function:: getwin(file) - Read window related data stored in the file by an earlier :func:`putwin` call. + Read window related data stored in the file by an earlier :func:`window.putwin` call. The routine then creates and initializes a new window using that data, returning the new window object. @@ -1323,9 +1323,9 @@ The :mod:`curses` module defines the following data members: .. data:: version +.. data:: __version__ - A bytes object representing the current version of the module. Also available as - :const:`__version__`. 
+ A bytes object representing the current version of the module. .. data:: ncurses_version @@ -1339,51 +1339,55 @@ The :mod:`curses` module defines the following data members: .. versionadded:: 3.8 +.. data:: COLORS + + The maximum number of colors the terminal can support. + +.. data:: COLOR_PAIRS + + The maximum number of color pairs the terminal can support. Some constants are available to specify character cell attributes. The exact constants available are system dependent. -+------------------+-------------------------------+ -| Attribute | Meaning | -+==================+===============================+ -| ``A_ALTCHARSET`` | Alternate character set mode | -+------------------+-------------------------------+ -| ``A_BLINK`` | Blink mode | -+------------------+-------------------------------+ -| ``A_BOLD`` | Bold mode | -+------------------+-------------------------------+ -| ``A_DIM`` | Dim mode | -+------------------+-------------------------------+ -| ``A_INVIS`` | Invisible or blank mode | -+------------------+-------------------------------+ -| ``A_ITALIC`` | Italic mode | -+------------------+-------------------------------+ -| ``A_NORMAL`` | Normal attribute | -+------------------+-------------------------------+ -| ``A_PROTECT`` | Protected mode | -+------------------+-------------------------------+ -| ``A_REVERSE`` | Reverse background and | -| | foreground colors | -+------------------+-------------------------------+ -| ``A_STANDOUT`` | Standout mode | -+------------------+-------------------------------+ -| ``A_UNDERLINE`` | Underline mode | -+------------------+-------------------------------+ -| ``A_HORIZONTAL`` | Horizontal highlight | -+------------------+-------------------------------+ -| ``A_LEFT`` | Left highlight | -+------------------+-------------------------------+ -| ``A_LOW`` | Low highlight | -+------------------+-------------------------------+ -| ``A_RIGHT`` | Right highlight | -+------------------+-------------------------------+ -| ``A_TOP`` | Top highlight | -+------------------+-------------------------------+ -| ``A_VERTICAL`` | Vertical highlight | -+------------------+-------------------------------+ -| ``A_CHARTEXT`` | Bit-mask to extract a | -| | character | -+------------------+-------------------------------+ ++------------------------+-------------------------------+ +| Attribute | Meaning | ++========================+===============================+ +| .. data:: A_ALTCHARSET | Alternate character set mode | ++------------------------+-------------------------------+ +| .. data:: A_BLINK | Blink mode | ++------------------------+-------------------------------+ +| .. data:: A_BOLD | Bold mode | ++------------------------+-------------------------------+ +| .. data:: A_DIM | Dim mode | ++------------------------+-------------------------------+ +| .. data:: A_INVIS | Invisible or blank mode | ++------------------------+-------------------------------+ +| .. data:: A_ITALIC | Italic mode | ++------------------------+-------------------------------+ +| .. data:: A_NORMAL | Normal attribute | ++------------------------+-------------------------------+ +| .. data:: A_PROTECT | Protected mode | ++------------------------+-------------------------------+ +| .. data:: A_REVERSE | Reverse background and | +| | foreground colors | ++------------------------+-------------------------------+ +| .. data:: A_STANDOUT | Standout mode | ++------------------------+-------------------------------+ +| .. 
data:: A_UNDERLINE | Underline mode | ++------------------------+-------------------------------+ +| .. data:: A_HORIZONTAL | Horizontal highlight | ++------------------------+-------------------------------+ +| .. data:: A_LEFT | Left highlight | ++------------------------+-------------------------------+ +| .. data:: A_LOW | Low highlight | ++------------------------+-------------------------------+ +| .. data:: A_RIGHT | Right highlight | ++------------------------+-------------------------------+ +| .. data:: A_TOP | Top highlight | ++------------------------+-------------------------------+ +| .. data:: A_VERTICAL | Vertical highlight | ++------------------------+-------------------------------+ .. versionadded:: 3.7 ``A_ITALIC`` was added. @@ -1391,220 +1395,220 @@ The exact constants available are system dependent. Several constants are available to extract corresponding attributes returned by some methods. -+------------------+-------------------------------+ -| Bit-mask | Meaning | -+==================+===============================+ -| ``A_ATTRIBUTES`` | Bit-mask to extract | -| | attributes | -+------------------+-------------------------------+ -| ``A_CHARTEXT`` | Bit-mask to extract a | -| | character | -+------------------+-------------------------------+ -| ``A_COLOR`` | Bit-mask to extract | -| | color-pair field information | -+------------------+-------------------------------+ ++-------------------------+-------------------------------+ +| Bit-mask | Meaning | ++=========================+===============================+ +| .. data:: A_ATTRIBUTES | Bit-mask to extract | +| | attributes | ++-------------------------+-------------------------------+ +| .. data:: A_CHARTEXT | Bit-mask to extract a | +| | character | ++-------------------------+-------------------------------+ +| .. data:: A_COLOR | Bit-mask to extract | +| | color-pair field information | ++-------------------------+-------------------------------+ Keys are referred to by integer constants with names starting with ``KEY_``. The exact keycaps available are system dependent. .. XXX this table is far too large! should it be alphabetized? -+-------------------+--------------------------------------------+ -| Key constant | Key | -+===================+============================================+ -| ``KEY_MIN`` | Minimum key value | -+-------------------+--------------------------------------------+ -| ``KEY_BREAK`` | Break key (unreliable) | -+-------------------+--------------------------------------------+ -| ``KEY_DOWN`` | Down-arrow | -+-------------------+--------------------------------------------+ -| ``KEY_UP`` | Up-arrow | -+-------------------+--------------------------------------------+ -| ``KEY_LEFT`` | Left-arrow | -+-------------------+--------------------------------------------+ -| ``KEY_RIGHT`` | Right-arrow | -+-------------------+--------------------------------------------+ -| ``KEY_HOME`` | Home key (upward+left arrow) | -+-------------------+--------------------------------------------+ -| ``KEY_BACKSPACE`` | Backspace (unreliable) | -+-------------------+--------------------------------------------+ -| ``KEY_F0`` | Function keys. Up to 64 function keys are | -| | supported. 
| -+-------------------+--------------------------------------------+ -| ``KEY_Fn`` | Value of function key *n* | -+-------------------+--------------------------------------------+ -| ``KEY_DL`` | Delete line | -+-------------------+--------------------------------------------+ -| ``KEY_IL`` | Insert line | -+-------------------+--------------------------------------------+ -| ``KEY_DC`` | Delete character | -+-------------------+--------------------------------------------+ -| ``KEY_IC`` | Insert char or enter insert mode | -+-------------------+--------------------------------------------+ -| ``KEY_EIC`` | Exit insert char mode | -+-------------------+--------------------------------------------+ -| ``KEY_CLEAR`` | Clear screen | -+-------------------+--------------------------------------------+ -| ``KEY_EOS`` | Clear to end of screen | -+-------------------+--------------------------------------------+ -| ``KEY_EOL`` | Clear to end of line | -+-------------------+--------------------------------------------+ -| ``KEY_SF`` | Scroll 1 line forward | -+-------------------+--------------------------------------------+ -| ``KEY_SR`` | Scroll 1 line backward (reverse) | -+-------------------+--------------------------------------------+ -| ``KEY_NPAGE`` | Next page | -+-------------------+--------------------------------------------+ -| ``KEY_PPAGE`` | Previous page | -+-------------------+--------------------------------------------+ -| ``KEY_STAB`` | Set tab | -+-------------------+--------------------------------------------+ -| ``KEY_CTAB`` | Clear tab | -+-------------------+--------------------------------------------+ -| ``KEY_CATAB`` | Clear all tabs | -+-------------------+--------------------------------------------+ -| ``KEY_ENTER`` | Enter or send (unreliable) | -+-------------------+--------------------------------------------+ -| ``KEY_SRESET`` | Soft (partial) reset (unreliable) | -+-------------------+--------------------------------------------+ -| ``KEY_RESET`` | Reset or hard reset (unreliable) | -+-------------------+--------------------------------------------+ -| ``KEY_PRINT`` | Print | -+-------------------+--------------------------------------------+ -| ``KEY_LL`` | Home down or bottom (lower left) | -+-------------------+--------------------------------------------+ -| ``KEY_A1`` | Upper left of keypad | -+-------------------+--------------------------------------------+ -| ``KEY_A3`` | Upper right of keypad | -+-------------------+--------------------------------------------+ -| ``KEY_B2`` | Center of keypad | -+-------------------+--------------------------------------------+ -| ``KEY_C1`` | Lower left of keypad | -+-------------------+--------------------------------------------+ -| ``KEY_C3`` | Lower right of keypad | -+-------------------+--------------------------------------------+ -| ``KEY_BTAB`` | Back tab | -+-------------------+--------------------------------------------+ -| ``KEY_BEG`` | Beg (beginning) | -+-------------------+--------------------------------------------+ -| ``KEY_CANCEL`` | Cancel | -+-------------------+--------------------------------------------+ -| ``KEY_CLOSE`` | Close | -+-------------------+--------------------------------------------+ -| ``KEY_COMMAND`` | Cmd (command) | -+-------------------+--------------------------------------------+ -| ``KEY_COPY`` | Copy | -+-------------------+--------------------------------------------+ -| ``KEY_CREATE`` | Create | -+-------------------+--------------------------------------------+ -| 
``KEY_END`` | End | -+-------------------+--------------------------------------------+ -| ``KEY_EXIT`` | Exit | -+-------------------+--------------------------------------------+ -| ``KEY_FIND`` | Find | -+-------------------+--------------------------------------------+ -| ``KEY_HELP`` | Help | -+-------------------+--------------------------------------------+ -| ``KEY_MARK`` | Mark | -+-------------------+--------------------------------------------+ -| ``KEY_MESSAGE`` | Message | -+-------------------+--------------------------------------------+ -| ``KEY_MOVE`` | Move | -+-------------------+--------------------------------------------+ -| ``KEY_NEXT`` | Next | -+-------------------+--------------------------------------------+ -| ``KEY_OPEN`` | Open | -+-------------------+--------------------------------------------+ -| ``KEY_OPTIONS`` | Options | -+-------------------+--------------------------------------------+ -| ``KEY_PREVIOUS`` | Prev (previous) | -+-------------------+--------------------------------------------+ -| ``KEY_REDO`` | Redo | -+-------------------+--------------------------------------------+ -| ``KEY_REFERENCE`` | Ref (reference) | -+-------------------+--------------------------------------------+ -| ``KEY_REFRESH`` | Refresh | -+-------------------+--------------------------------------------+ -| ``KEY_REPLACE`` | Replace | -+-------------------+--------------------------------------------+ -| ``KEY_RESTART`` | Restart | -+-------------------+--------------------------------------------+ -| ``KEY_RESUME`` | Resume | -+-------------------+--------------------------------------------+ -| ``KEY_SAVE`` | Save | -+-------------------+--------------------------------------------+ -| ``KEY_SBEG`` | Shifted Beg (beginning) | -+-------------------+--------------------------------------------+ -| ``KEY_SCANCEL`` | Shifted Cancel | -+-------------------+--------------------------------------------+ -| ``KEY_SCOMMAND`` | Shifted Command | -+-------------------+--------------------------------------------+ -| ``KEY_SCOPY`` | Shifted Copy | -+-------------------+--------------------------------------------+ -| ``KEY_SCREATE`` | Shifted Create | -+-------------------+--------------------------------------------+ -| ``KEY_SDC`` | Shifted Delete char | -+-------------------+--------------------------------------------+ -| ``KEY_SDL`` | Shifted Delete line | -+-------------------+--------------------------------------------+ -| ``KEY_SELECT`` | Select | -+-------------------+--------------------------------------------+ -| ``KEY_SEND`` | Shifted End | -+-------------------+--------------------------------------------+ -| ``KEY_SEOL`` | Shifted Clear line | -+-------------------+--------------------------------------------+ -| ``KEY_SEXIT`` | Shifted Exit | -+-------------------+--------------------------------------------+ -| ``KEY_SFIND`` | Shifted Find | -+-------------------+--------------------------------------------+ -| ``KEY_SHELP`` | Shifted Help | -+-------------------+--------------------------------------------+ -| ``KEY_SHOME`` | Shifted Home | -+-------------------+--------------------------------------------+ -| ``KEY_SIC`` | Shifted Input | -+-------------------+--------------------------------------------+ -| ``KEY_SLEFT`` | Shifted Left arrow | -+-------------------+--------------------------------------------+ -| ``KEY_SMESSAGE`` | Shifted Message | -+-------------------+--------------------------------------------+ -| ``KEY_SMOVE`` | Shifted Move | 
-+-------------------+--------------------------------------------+ -| ``KEY_SNEXT`` | Shifted Next | -+-------------------+--------------------------------------------+ -| ``KEY_SOPTIONS`` | Shifted Options | -+-------------------+--------------------------------------------+ -| ``KEY_SPREVIOUS`` | Shifted Prev | -+-------------------+--------------------------------------------+ -| ``KEY_SPRINT`` | Shifted Print | -+-------------------+--------------------------------------------+ -| ``KEY_SREDO`` | Shifted Redo | -+-------------------+--------------------------------------------+ -| ``KEY_SREPLACE`` | Shifted Replace | -+-------------------+--------------------------------------------+ -| ``KEY_SRIGHT`` | Shifted Right arrow | -+-------------------+--------------------------------------------+ -| ``KEY_SRSUME`` | Shifted Resume | -+-------------------+--------------------------------------------+ -| ``KEY_SSAVE`` | Shifted Save | -+-------------------+--------------------------------------------+ -| ``KEY_SSUSPEND`` | Shifted Suspend | -+-------------------+--------------------------------------------+ -| ``KEY_SUNDO`` | Shifted Undo | -+-------------------+--------------------------------------------+ -| ``KEY_SUSPEND`` | Suspend | -+-------------------+--------------------------------------------+ -| ``KEY_UNDO`` | Undo | -+-------------------+--------------------------------------------+ -| ``KEY_MOUSE`` | Mouse event has occurred | -+-------------------+--------------------------------------------+ -| ``KEY_RESIZE`` | Terminal resize event | -+-------------------+--------------------------------------------+ -| ``KEY_MAX`` | Maximum key value | -+-------------------+--------------------------------------------+ ++-------------------------+--------------------------------------------+ +| Key constant | Key | ++=========================+============================================+ +| .. data:: KEY_MIN | Minimum key value | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_BREAK | Break key (unreliable) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_DOWN | Down-arrow | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_UP | Up-arrow | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_LEFT | Left-arrow | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_RIGHT | Right-arrow | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_HOME | Home key (upward+left arrow) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_BACKSPACE | Backspace (unreliable) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_F0 | Function keys. Up to 64 function keys are | +| | supported. | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_Fn | Value of function key *n* | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_DL | Delete line | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_IL | Insert line | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_DC | Delete character | ++-------------------------+--------------------------------------------+ +| .. 
data:: KEY_IC | Insert char or enter insert mode | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_EIC | Exit insert char mode | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_CLEAR | Clear screen | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_EOS | Clear to end of screen | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_EOL | Clear to end of line | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SF | Scroll 1 line forward | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SR | Scroll 1 line backward (reverse) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_NPAGE | Next page | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_PPAGE | Previous page | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_STAB | Set tab | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_CTAB | Clear tab | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_CATAB | Clear all tabs | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_ENTER | Enter or send (unreliable) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SRESET | Soft (partial) reset (unreliable) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_RESET | Reset or hard reset (unreliable) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_PRINT | Print | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_LL | Home down or bottom (lower left) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_A1 | Upper left of keypad | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_A3 | Upper right of keypad | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_B2 | Center of keypad | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_C1 | Lower left of keypad | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_C3 | Lower right of keypad | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_BTAB | Back tab | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_BEG | Beg (beginning) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_CANCEL | Cancel | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_CLOSE | Close | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_COMMAND | Cmd (command) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_COPY | Copy | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_CREATE | Create | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_END | End | ++-------------------------+--------------------------------------------+ +| .. 
data:: KEY_EXIT | Exit | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_FIND | Find | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_HELP | Help | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_MARK | Mark | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_MESSAGE | Message | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_MOVE | Move | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_NEXT | Next | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_OPEN | Open | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_OPTIONS | Options | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_PREVIOUS | Prev (previous) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_REDO | Redo | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_REFERENCE | Ref (reference) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_REFRESH | Refresh | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_REPLACE | Replace | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_RESTART | Restart | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_RESUME | Resume | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SAVE | Save | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SBEG | Shifted Beg (beginning) | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SCANCEL | Shifted Cancel | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SCOMMAND | Shifted Command | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SCOPY | Shifted Copy | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SCREATE | Shifted Create | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SDC | Shifted Delete char | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SDL | Shifted Delete line | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SELECT | Select | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SEND | Shifted End | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SEOL | Shifted Clear line | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SEXIT | Shifted Exit | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SFIND | Shifted Find | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SHELP | Shifted Help | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SHOME | Shifted Home | ++-------------------------+--------------------------------------------+ +| .. 
data:: KEY_SIC | Shifted Input | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SLEFT | Shifted Left arrow | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SMESSAGE | Shifted Message | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SMOVE | Shifted Move | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SNEXT | Shifted Next | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SOPTIONS | Shifted Options | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SPREVIOUS | Shifted Prev | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SPRINT | Shifted Print | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SREDO | Shifted Redo | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SREPLACE | Shifted Replace | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SRIGHT | Shifted Right arrow | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SRSUME | Shifted Resume | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SSAVE | Shifted Save | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SSUSPEND | Shifted Suspend | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SUNDO | Shifted Undo | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_SUSPEND | Suspend | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_UNDO | Undo | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_MOUSE | Mouse event has occurred | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_RESIZE | Terminal resize event | ++-------------------------+--------------------------------------------+ +| .. data:: KEY_MAX | Maximum key value | ++-------------------------+--------------------------------------------+ On VT100s and their software emulations, such as X terminal emulators, there are -normally at least four function keys (:const:`KEY_F1`, :const:`KEY_F2`, -:const:`KEY_F3`, :const:`KEY_F4`) available, and the arrow keys mapped to +normally at least four function keys (:const:`KEY_F1 `, :const:`KEY_F2 `, +:const:`KEY_F3 `, :const:`KEY_F4 `) available, and the arrow keys mapped to :const:`KEY_UP`, :const:`KEY_DOWN`, :const:`KEY_LEFT` and :const:`KEY_RIGHT` in the obvious way. If your machine has a PC keyboard, it is safe to expect arrow keys and twelve function keys (older PC keyboards may have only ten function @@ -1635,117 +1639,143 @@ falls back on a crude printable ASCII approximation. These are available only after :func:`initscr` has been called. 
-+------------------+------------------------------------------+ -| ACS code | Meaning | -+==================+==========================================+ -| ``ACS_BBSS`` | alternate name for upper right corner | -+------------------+------------------------------------------+ -| ``ACS_BLOCK`` | solid square block | -+------------------+------------------------------------------+ -| ``ACS_BOARD`` | board of squares | -+------------------+------------------------------------------+ -| ``ACS_BSBS`` | alternate name for horizontal line | -+------------------+------------------------------------------+ -| ``ACS_BSSB`` | alternate name for upper left corner | -+------------------+------------------------------------------+ -| ``ACS_BSSS`` | alternate name for top tee | -+------------------+------------------------------------------+ -| ``ACS_BTEE`` | bottom tee | -+------------------+------------------------------------------+ -| ``ACS_BULLET`` | bullet | -+------------------+------------------------------------------+ -| ``ACS_CKBOARD`` | checker board (stipple) | -+------------------+------------------------------------------+ -| ``ACS_DARROW`` | arrow pointing down | -+------------------+------------------------------------------+ -| ``ACS_DEGREE`` | degree symbol | -+------------------+------------------------------------------+ -| ``ACS_DIAMOND`` | diamond | -+------------------+------------------------------------------+ -| ``ACS_GEQUAL`` | greater-than-or-equal-to | -+------------------+------------------------------------------+ -| ``ACS_HLINE`` | horizontal line | -+------------------+------------------------------------------+ -| ``ACS_LANTERN`` | lantern symbol | -+------------------+------------------------------------------+ -| ``ACS_LARROW`` | left arrow | -+------------------+------------------------------------------+ -| ``ACS_LEQUAL`` | less-than-or-equal-to | -+------------------+------------------------------------------+ -| ``ACS_LLCORNER`` | lower left-hand corner | -+------------------+------------------------------------------+ -| ``ACS_LRCORNER`` | lower right-hand corner | -+------------------+------------------------------------------+ -| ``ACS_LTEE`` | left tee | -+------------------+------------------------------------------+ -| ``ACS_NEQUAL`` | not-equal sign | -+------------------+------------------------------------------+ -| ``ACS_PI`` | letter pi | -+------------------+------------------------------------------+ -| ``ACS_PLMINUS`` | plus-or-minus sign | -+------------------+------------------------------------------+ -| ``ACS_PLUS`` | big plus sign | -+------------------+------------------------------------------+ -| ``ACS_RARROW`` | right arrow | -+------------------+------------------------------------------+ -| ``ACS_RTEE`` | right tee | -+------------------+------------------------------------------+ -| ``ACS_S1`` | scan line 1 | -+------------------+------------------------------------------+ -| ``ACS_S3`` | scan line 3 | -+------------------+------------------------------------------+ -| ``ACS_S7`` | scan line 7 | -+------------------+------------------------------------------+ -| ``ACS_S9`` | scan line 9 | -+------------------+------------------------------------------+ -| ``ACS_SBBS`` | alternate name for lower right corner | -+------------------+------------------------------------------+ -| ``ACS_SBSB`` | alternate name for vertical line | -+------------------+------------------------------------------+ -| ``ACS_SBSS`` | alternate name for right tee | 
-+------------------+------------------------------------------+ -| ``ACS_SSBB`` | alternate name for lower left corner | -+------------------+------------------------------------------+ -| ``ACS_SSBS`` | alternate name for bottom tee | -+------------------+------------------------------------------+ -| ``ACS_SSSB`` | alternate name for left tee | -+------------------+------------------------------------------+ -| ``ACS_SSSS`` | alternate name for crossover or big plus | -+------------------+------------------------------------------+ -| ``ACS_STERLING`` | pound sterling | -+------------------+------------------------------------------+ -| ``ACS_TTEE`` | top tee | -+------------------+------------------------------------------+ -| ``ACS_UARROW`` | up arrow | -+------------------+------------------------------------------+ -| ``ACS_ULCORNER`` | upper left corner | -+------------------+------------------------------------------+ -| ``ACS_URCORNER`` | upper right corner | -+------------------+------------------------------------------+ -| ``ACS_VLINE`` | vertical line | -+------------------+------------------------------------------+ ++------------------------+------------------------------------------+ +| ACS code | Meaning | ++========================+==========================================+ +| .. data:: ACS_BBSS | alternate name for upper right corner | ++------------------------+------------------------------------------+ +| .. data:: ACS_BLOCK | solid square block | ++------------------------+------------------------------------------+ +| .. data:: ACS_BOARD | board of squares | ++------------------------+------------------------------------------+ +| .. data:: ACS_BSBS | alternate name for horizontal line | ++------------------------+------------------------------------------+ +| .. data:: ACS_BSSB | alternate name for upper left corner | ++------------------------+------------------------------------------+ +| .. data:: ACS_BSSS | alternate name for top tee | ++------------------------+------------------------------------------+ +| .. data:: ACS_BTEE | bottom tee | ++------------------------+------------------------------------------+ +| .. data:: ACS_BULLET | bullet | ++------------------------+------------------------------------------+ +| .. data:: ACS_CKBOARD | checker board (stipple) | ++------------------------+------------------------------------------+ +| .. data:: ACS_DARROW | arrow pointing down | ++------------------------+------------------------------------------+ +| .. data:: ACS_DEGREE | degree symbol | ++------------------------+------------------------------------------+ +| .. data:: ACS_DIAMOND | diamond | ++------------------------+------------------------------------------+ +| .. data:: ACS_GEQUAL | greater-than-or-equal-to | ++------------------------+------------------------------------------+ +| .. data:: ACS_HLINE | horizontal line | ++------------------------+------------------------------------------+ +| .. data:: ACS_LANTERN | lantern symbol | ++------------------------+------------------------------------------+ +| .. data:: ACS_LARROW | left arrow | ++------------------------+------------------------------------------+ +| .. data:: ACS_LEQUAL | less-than-or-equal-to | ++------------------------+------------------------------------------+ +| .. data:: ACS_LLCORNER | lower left-hand corner | ++------------------------+------------------------------------------+ +| .. 
data:: ACS_LRCORNER | lower right-hand corner | ++------------------------+------------------------------------------+ +| .. data:: ACS_LTEE | left tee | ++------------------------+------------------------------------------+ +| .. data:: ACS_NEQUAL | not-equal sign | ++------------------------+------------------------------------------+ +| .. data:: ACS_PI | letter pi | ++------------------------+------------------------------------------+ +| .. data:: ACS_PLMINUS | plus-or-minus sign | ++------------------------+------------------------------------------+ +| .. data:: ACS_PLUS | big plus sign | ++------------------------+------------------------------------------+ +| .. data:: ACS_RARROW | right arrow | ++------------------------+------------------------------------------+ +| .. data:: ACS_RTEE | right tee | ++------------------------+------------------------------------------+ +| .. data:: ACS_S1 | scan line 1 | ++------------------------+------------------------------------------+ +| .. data:: ACS_S3 | scan line 3 | ++------------------------+------------------------------------------+ +| .. data:: ACS_S7 | scan line 7 | ++------------------------+------------------------------------------+ +| .. data:: ACS_S9 | scan line 9 | ++------------------------+------------------------------------------+ +| .. data:: ACS_SBBS | alternate name for lower right corner | ++------------------------+------------------------------------------+ +| .. data:: ACS_SBSB | alternate name for vertical line | ++------------------------+------------------------------------------+ +| .. data:: ACS_SBSS | alternate name for right tee | ++------------------------+------------------------------------------+ +| .. data:: ACS_SSBB | alternate name for lower left corner | ++------------------------+------------------------------------------+ +| .. data:: ACS_SSBS | alternate name for bottom tee | ++------------------------+------------------------------------------+ +| .. data:: ACS_SSSB | alternate name for left tee | ++------------------------+------------------------------------------+ +| .. data:: ACS_SSSS | alternate name for crossover or big plus | ++------------------------+------------------------------------------+ +| .. data:: ACS_STERLING | pound sterling | ++------------------------+------------------------------------------+ +| .. data:: ACS_TTEE | top tee | ++------------------------+------------------------------------------+ +| .. data:: ACS_UARROW | up arrow | ++------------------------+------------------------------------------+ +| .. data:: ACS_ULCORNER | upper left corner | ++------------------------+------------------------------------------+ +| .. data:: ACS_URCORNER | upper right corner | ++------------------------+------------------------------------------+ +| .. data:: ACS_VLINE | vertical line | ++------------------------+------------------------------------------+ + +The following table lists mouse button constants used by :meth:`getmouse`: + ++----------------------------------+---------------------------------------------+ +| Mouse button constant | Meaning | ++==================================+=============================================+ +| .. data:: BUTTONn_PRESSED | Mouse button *n* pressed | ++----------------------------------+---------------------------------------------+ +| .. data:: BUTTONn_RELEASED | Mouse button *n* released | ++----------------------------------+---------------------------------------------+ +| .. 
data:: BUTTONn_CLICKED | Mouse button *n* clicked | ++----------------------------------+---------------------------------------------+ +| .. data:: BUTTONn_DOUBLE_CLICKED | Mouse button *n* double clicked | ++----------------------------------+---------------------------------------------+ +| .. data:: BUTTONn_TRIPLE_CLICKED | Mouse button *n* triple clicked | ++----------------------------------+---------------------------------------------+ +| .. data:: BUTTON_SHIFT | Shift was down during button state change | ++----------------------------------+---------------------------------------------+ +| .. data:: BUTTON_CTRL | Control was down during button state change | ++----------------------------------+---------------------------------------------+ +| .. data:: BUTTON_ALT | Alt was down during button state change | ++----------------------------------+---------------------------------------------+ + + .. versionchanged:: 3.10 + The ``BUTTON5_*`` constants are now exposed if they are provided by the + underlying curses library. The following table lists the predefined colors: -+-------------------+----------------------------+ -| Constant | Color | -+===================+============================+ -| ``COLOR_BLACK`` | Black | -+-------------------+----------------------------+ -| ``COLOR_BLUE`` | Blue | -+-------------------+----------------------------+ -| ``COLOR_CYAN`` | Cyan (light greenish blue) | -+-------------------+----------------------------+ -| ``COLOR_GREEN`` | Green | -+-------------------+----------------------------+ -| ``COLOR_MAGENTA`` | Magenta (purplish red) | -+-------------------+----------------------------+ -| ``COLOR_RED`` | Red | -+-------------------+----------------------------+ -| ``COLOR_WHITE`` | White | -+-------------------+----------------------------+ -| ``COLOR_YELLOW`` | Yellow | -+-------------------+----------------------------+ ++-------------------------+----------------------------+ +| Constant | Color | ++=========================+============================+ +| .. data:: COLOR_BLACK | Black | ++-------------------------+----------------------------+ +| .. data:: COLOR_BLUE | Blue | ++-------------------------+----------------------------+ +| .. data:: COLOR_CYAN | Cyan (light greenish blue) | ++-------------------------+----------------------------+ +| .. data:: COLOR_GREEN | Green | ++-------------------------+----------------------------+ +| .. data:: COLOR_MAGENTA | Magenta (purplish red) | ++-------------------------+----------------------------+ +| .. data:: COLOR_RED | Red | ++-------------------------+----------------------------+ +| .. data:: COLOR_WHITE | White | ++-------------------------+----------------------------+ +| .. data:: COLOR_YELLOW | Yellow | ++-------------------------+----------------------------+ :mod:`curses.textpad` --- Text input widget for curses programs @@ -1851,19 +1881,19 @@ You can instantiate a :class:`Textbox` object as follows: Move operations do nothing if the cursor is at an edge where the movement is not possible. 
The following synonyms are supported where possible: - +------------------------+------------------+ - | Constant | Keystroke | - +========================+==================+ - | :const:`KEY_LEFT` | :kbd:`Control-B` | - +------------------------+------------------+ - | :const:`KEY_RIGHT` | :kbd:`Control-F` | - +------------------------+------------------+ - | :const:`KEY_UP` | :kbd:`Control-P` | - +------------------------+------------------+ - | :const:`KEY_DOWN` | :kbd:`Control-N` | - +------------------------+------------------+ - | :const:`KEY_BACKSPACE` | :kbd:`Control-h` | - +------------------------+------------------+ + +--------------------------------+------------------+ + | Constant | Keystroke | + +================================+==================+ + | :const:`~curses.KEY_LEFT` | :kbd:`Control-B` | + +--------------------------------+------------------+ + | :const:`~curses.KEY_RIGHT` | :kbd:`Control-F` | + +--------------------------------+------------------+ + | :const:`~curses.KEY_UP` | :kbd:`Control-P` | + +--------------------------------+------------------+ + | :const:`~curses.KEY_DOWN` | :kbd:`Control-N` | + +--------------------------------+------------------+ + | :const:`~curses.KEY_BACKSPACE` | :kbd:`Control-h` | + +--------------------------------+------------------+ All other keystrokes are treated as a command to insert the given character and move right (with line wrapping). diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index 6c3f436ddb1494..248743b8fa0a87 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -1196,6 +1196,14 @@ iterations of the loop. .. versionadded:: 3.12 +.. opcode:: LOAD_FAST_AND_CLEAR (var_num) + + Pushes a reference to the local ``co_varnames[var_num]`` onto the stack (or + pushes ``NULL`` onto the stack if the local variable has not been + initialized) and sets ``co_varnames[var_num]`` to ``NULL``. + + .. versionadded:: 3.12 + .. opcode:: STORE_FAST (var_num) Stores ``STACK.pop()`` into the local ``co_varnames[var_num]``. @@ -1367,7 +1375,7 @@ iterations of the loop. .. opcode:: BUILD_SLICE (argc) - .. index:: builtin: slice + .. index:: pair: built-in function; slice Pushes a slice object on the stack. *argc* must be 2 or 3. If it is 2, implements:: diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 18c3f47dddc079..4c84e5f855431a 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -4,8 +4,8 @@ Built-in Exceptions =================== .. index:: - statement: try - statement: except + pair: statement; try + pair: statement; except In Python, all exceptions must be instances of a class that derives from :class:`BaseException`. In a :keyword:`try` statement with an :keyword:`except` @@ -14,7 +14,7 @@ classes derived from that class (but not exception classes from which *it* is derived). Two exception classes that are not related via subclassing are never equivalent, even if they have the same name. -.. index:: statement: raise +.. index:: pair: statement; raise The built-in exceptions listed below can be generated by the interpreter or built-in functions. Except where mentioned, they have an "associated value" @@ -175,7 +175,7 @@ The following exceptions are the exceptions that are usually raised. .. exception:: AssertionError - .. index:: statement: assert + .. index:: pair: statement; assert Raised when an :keyword:`assert` statement fails. @@ -318,7 +318,7 @@ The following exceptions are the exceptions that are usually raised. .. 
exception:: OSError([arg]) OSError(errno, strerror[, filename[, winerror[, filename2]]]) - .. index:: module: errno + .. index:: pair: module; errno This exception is raised when a system function returns a system-related error, including I/O failures such as "file not found" or "disk full" diff --git a/Doc/library/fnmatch.rst b/Doc/library/fnmatch.rst index 46bf0fc2848058..aed8991d44772f 100644 --- a/Doc/library/fnmatch.rst +++ b/Doc/library/fnmatch.rst @@ -8,7 +8,7 @@ .. index:: single: filenames; wildcard expansion -.. index:: module: re +.. index:: pair: module; re -------------- @@ -38,7 +38,7 @@ special characters used in shell-style wildcards are: For a literal match, wrap the meta-characters in brackets. For example, ``'[?]'`` matches the character ``'?'``. -.. index:: module: glob +.. index:: pair: module; glob Note that the filename separator (``'/'`` on Unix) is *not* special to this module. See module :mod:`glob` for pathname expansion (:mod:`glob` uses diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index a5e86ef0f9eb59..48a832db60e919 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -147,7 +147,7 @@ are always available. They are listed here in alphabetical order. or omitted, this returns ``False``; otherwise, it returns ``True``. The :class:`bool` class is a subclass of :class:`int` (see :ref:`typesnumeric`). It cannot be subclassed further. Its only instances are ``False`` and - ``True`` (see :ref:`bltin-boolean-values`). + ``True`` (see :ref:`typebool`). .. index:: pair: Boolean; type @@ -562,7 +562,7 @@ are always available. They are listed here in alphabetical order. Raises an :ref:`auditing event ` ``exec`` with the code object as the argument. Code compilation events may also be raised. -.. index:: builtin: exec +.. index:: pair: built-in function; exec .. function:: exec(object, globals=None, locals=None, /, *, closure=None) @@ -1340,7 +1340,7 @@ are always available. They are listed here in alphabetical order. single: I/O control; buffering single: binary mode single: text mode - module: sys + pair: module; sys See also the file handling modules, such as :mod:`fileinput`, :mod:`io` (where :func:`open` is declared), :mod:`os`, :mod:`os.path`, :mod:`tempfile`, @@ -1830,7 +1830,7 @@ are always available. They are listed here in alphabetical order. .. class:: type(object) type(name, bases, dict, **kwds) - .. index:: object: type + .. index:: pair: object; type With one argument, return the type of an *object*. The return value is a type object and generally the same object as returned by @@ -1986,7 +1986,8 @@ are always available. They are listed here in alphabetical order. .. function:: __import__(name, globals=None, locals=None, fromlist=(), level=0) .. index:: - statement: import + pair: statement; import + pair: module; builtins .. note:: diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 29cbc87bf66d12..40f43f8b3519cd 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -110,18 +110,10 @@ The :mod:`functools` module defines the following functions: ``__slots__`` without including ``__dict__`` as one of the defined slots (as such classes don't provide a ``__dict__`` attribute at all). 
- If a mutable mapping is not available or if space-efficient key sharing - is desired, an effect similar to :func:`cached_property` can be achieved - by a stacking :func:`property` on top of :func:`cache`:: - - class DataSet: - def __init__(self, sequence_of_numbers): - self._data = sequence_of_numbers - - @property - @cache - def stdev(self): - return statistics.stdev(self._data) + If a mutable mapping is not available or if space-efficient key sharing is + desired, an effect similar to :func:`cached_property` can also be achieved by + stacking :func:`property` on top of :func:`lru_cache`. See + :ref:`faq-cache-method-calls` for more details on how this differs from :func:`cached_property`. .. versionadded:: 3.8 diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst index f8d10c0c295c7a..6275f96f7d4d19 100644 --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -432,7 +432,7 @@ Constructor functions also accept the following tree hashing parameters: :alt: Explanation of tree mode parameters. See section 2.10 in `BLAKE2 specification -`_ for comprehensive review of tree +`_ for comprehensive review of tree hashing. @@ -619,7 +619,7 @@ on the hash function used in digital signatures. by the signer. (`NIST SP-800-106 "Randomized Hashing for Digital Signatures" - `_) + `_) In BLAKE2 the salt is processed as a one-time input to the hash function during initialization, rather than as an input to each compression function. @@ -628,7 +628,7 @@ initialization, rather than as an input to each compression function. *Salted hashing* (or just hashing) with BLAKE2 or any other general-purpose cryptographic hash function, such as SHA-256, is not suitable for hashing - passwords. See `BLAKE2 FAQ `_ for more + passwords. See `BLAKE2 FAQ `_ for more information. .. @@ -764,9 +764,9 @@ Domain Dedication 1.0 Universal: * *Alexandr Sokolovskiy* -.. _BLAKE2: https://blake2.net +.. _BLAKE2: https://www.blake2.net .. _HMAC: https://en.wikipedia.org/wiki/Hash-based_message_authentication_code -.. _BLAKE: https://131002.net/blake/ +.. _BLAKE: https://web.archive.org/web/20200918190133/https://131002.net/blake/ .. _SHA-3: https://en.wikipedia.org/wiki/NIST_hash_function_competition .. _ChaCha: https://cr.yp.to/chacha.html .. _pyblake2: https://pythonhosted.org/pyblake2/ @@ -782,7 +782,7 @@ Domain Dedication 1.0 Universal: Module :mod:`base64` Another way to encode binary hashes for non-binary environments. - https://blake2.net + https://www.blake2.net Official BLAKE2 website. https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst index 38821b32c91cf1..eb8c1e198e2b09 100644 --- a/Doc/library/http.client.rst +++ b/Doc/library/http.client.rst @@ -10,7 +10,7 @@ pair: HTTP; protocol single: HTTP; http.client (standard module) -.. index:: module: urllib.request +.. index:: pair: module; urllib.request -------------- @@ -354,7 +354,7 @@ HTTPConnection Objects the CONNECT request. As HTTP/1.1 is used for HTTP CONNECT tunnelling request, `as per the RFC - `_, a HTTP ``Host:`` + `_, a HTTP ``Host:`` header must be provided, matching the authority-form of the request target provided as the destination for the CONNECT request. 
If a HTTP ``Host:`` header is not provided via the headers argument, one is generated and diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst index b306d5f55a714f..d2cc769e2c8400 100644 --- a/Doc/library/importlib.metadata.rst +++ b/Doc/library/importlib.metadata.rst @@ -1,11 +1,11 @@ .. _using: -================================= - Using :mod:`!importlib.metadata` -================================= +======================================================== +:mod:`!importlib.metadata` -- Accessing package metadata +======================================================== .. module:: importlib.metadata - :synopsis: The implementation of the importlib metadata. + :synopsis: Accessing package metadata .. versionadded:: 3.8 .. versionchanged:: 3.10 @@ -13,7 +13,7 @@ **Source code:** :source:`Lib/importlib/metadata/__init__.py` -``importlib_metadata`` is a library that provides access to +``importlib.metadata`` is a library that provides access to the metadata of an installed `Distribution Package `_, such as its entry points or its top-level names (`Import Package `_\s, modules, if any). @@ -24,7 +24,7 @@ API`_ and `metadata API`_ of ``pkg_resources``. Along with this package can eliminate the need to use the older and less efficient ``pkg_resources`` package. -``importlib_metadata`` operates on third-party *distribution packages* +``importlib.metadata`` operates on third-party *distribution packages* installed into Python's ``site-packages`` directory via tools such as `pip `_. Specifically, it works with distributions with discoverable @@ -178,7 +178,7 @@ The "selectable" entry points were introduced in ``importlib_metadata`` no parameters and always returned a dictionary of entry points, keyed by group. With ``importlib_metadata`` 5.0 and Python 3.12, ``entry_points`` always returns an ``EntryPoints`` object. See -`backports.entry_points_selectable `_ +`backports.entry_points_selectable `_ for compatibility options. @@ -368,7 +368,7 @@ system :ref:`finders `. To find a distribution package's m ``importlib.metadata`` queries the list of :term:`meta path finders ` on :data:`sys.meta_path`. -By default ``importlib_metadata`` installs a finder for distribution packages +By default ``importlib.metadata`` installs a finder for distribution packages found on the file system. This finder doesn't actually find any *distributions*, but it can find their metadata. diff --git a/Doc/library/importlib.resources.rst b/Doc/library/importlib.resources.rst index 4c6aa59bf9f58f..755693840fecd8 100644 --- a/Doc/library/importlib.resources.rst +++ b/Doc/library/importlib.resources.rst @@ -1,5 +1,5 @@ -:mod:`importlib.resources` -- Resources ---------------------------------------- +:mod:`importlib.resources` -- Package resource reading, opening and access +-------------------------------------------------------------------------- .. module:: importlib.resources :synopsis: Package resource reading, opening, and access @@ -97,7 +97,7 @@ for example, a package and its resources can be imported from a zip file using Deprecated functions --------------------- +^^^^^^^^^^^^^^^^^^^^ An older, deprecated set of functions is still available, but is scheduled for removal in a future version of Python. diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index 89efa64c6b5203..65aaad0df9ee66 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -127,28 +127,6 @@ Functions .. versionchanged:: 3.3 Parent packages are automatically imported. -.. 
function:: find_loader(name, path=None) - - Find the loader for a module, optionally within the specified *path*. If the - module is in :attr:`sys.modules`, then ``sys.modules[name].__loader__`` is - returned (unless the loader would be ``None`` or is not set, in which case - :exc:`ValueError` is raised). Otherwise a search using :attr:`sys.meta_path` - is done. ``None`` is returned if no loader is found. - - A dotted name does not have its parents implicitly imported as that requires - loading them and that may not be desired. To properly import a submodule you - will need to import all parent packages of the submodule and use the correct - argument to *path*. - - .. versionadded:: 3.3 - - .. versionchanged:: 3.4 - If ``__loader__`` is not set, raise :exc:`ValueError`, just like when the - attribute is set to ``None``. - - .. deprecated:: 3.4 - Use :func:`importlib.util.find_spec` instead. - .. function:: invalidate_caches() Invalidate the internal caches of finders stored at @@ -247,7 +225,6 @@ are also provided to help in implementing the core ABCs. ABC hierarchy:: object - +-- Finder (deprecated) +-- MetaPathFinder +-- PathEntryFinder +-- Loader @@ -258,28 +235,6 @@ ABC hierarchy:: +-- SourceLoader -.. class:: Finder - - An abstract base class representing a :term:`finder`. - - .. deprecated:: 3.3 - Use :class:`MetaPathFinder` or :class:`PathEntryFinder` instead. - - .. abstractmethod:: find_module(fullname, path=None) - - An abstract method for finding a :term:`loader` for the specified - module. Originally specified in :pep:`302`, this method was meant - for use in :data:`sys.meta_path` and in the path-based import subsystem. - - .. versionchanged:: 3.4 - Returns ``None`` when called instead of raising - :exc:`NotImplementedError`. - - .. deprecated:: 3.10 - Implement :meth:`MetaPathFinder.find_spec` or - :meth:`PathEntryFinder.find_spec` instead. - - .. class:: MetaPathFinder An abstract base class representing a :term:`meta path finder`. @@ -287,7 +242,7 @@ ABC hierarchy:: .. versionadded:: 3.3 .. versionchanged:: 3.10 - No longer a subclass of :class:`Finder`. + No longer a subclass of :class:`!Finder`. .. method:: find_spec(fullname, path, target=None) @@ -303,25 +258,6 @@ ABC hierarchy:: .. versionadded:: 3.4 - .. method:: find_module(fullname, path) - - A legacy method for finding a :term:`loader` for the specified - module. If this is a top-level import, *path* will be ``None``. - Otherwise, this is a search for a subpackage or module and *path* - will be the value of :attr:`__path__` from the parent - package. If a loader cannot be found, ``None`` is returned. - - If :meth:`find_spec` is defined, backwards-compatible functionality is - provided. - - .. versionchanged:: 3.4 - Returns ``None`` when called instead of raising - :exc:`NotImplementedError`. Can use :meth:`find_spec` to provide - functionality. - - .. deprecated:: 3.4 - Use :meth:`find_spec` instead. - .. method:: invalidate_caches() An optional method which, when called, should invalidate any internal @@ -342,7 +278,7 @@ ABC hierarchy:: .. versionadded:: 3.3 .. versionchanged:: 3.10 - No longer a subclass of :class:`Finder`. + No longer a subclass of :class:`!Finder`. .. method:: find_spec(fullname, target=None) @@ -356,36 +292,6 @@ ABC hierarchy:: .. versionadded:: 3.4 - .. method:: find_loader(fullname) - - A legacy method for finding a :term:`loader` for the specified - module. 
Returns a 2-tuple of ``(loader, portion)`` where ``portion`` - is a sequence of file system locations contributing to part of a namespace - package. The loader may be ``None`` while specifying ``portion`` to - signify the contribution of the file system locations to a namespace - package. An empty list can be used for ``portion`` to signify the loader - is not part of a namespace package. If ``loader`` is ``None`` and - ``portion`` is the empty list then no loader or location for a namespace - package were found (i.e. failure to find anything for the module). - - If :meth:`find_spec` is defined then backwards-compatible functionality is - provided. - - .. versionchanged:: 3.4 - Returns ``(None, [])`` instead of raising :exc:`NotImplementedError`. - Uses :meth:`find_spec` when available to provide functionality. - - .. deprecated:: 3.4 - Use :meth:`find_spec` instead. - - .. method:: find_module(fullname) - - A concrete implementation of :meth:`Finder.find_module` which is - equivalent to ``self.find_loader(fullname)[0]``. - - .. deprecated:: 3.4 - Use :meth:`find_spec` instead. - .. method:: invalidate_caches() An optional method which, when called, should invalidate any internal @@ -881,13 +787,6 @@ find and load modules. is no longer valid then ``None`` is returned but no value is cached in :data:`sys.path_importer_cache`. - .. classmethod:: find_module(fullname, path=None) - - A legacy wrapper around :meth:`find_spec`. - - .. deprecated:: 3.4 - Use :meth:`find_spec` instead. - .. classmethod:: invalidate_caches() Calls :meth:`importlib.abc.PathEntryFinder.invalidate_caches` on all @@ -938,13 +837,6 @@ find and load modules. .. versionadded:: 3.4 - .. method:: find_loader(fullname) - - Attempt to find the loader to handle *fullname* within :attr:`path`. - - .. deprecated:: 3.10 - Use :meth:`find_spec` instead. - .. method:: invalidate_caches() Clear out the internal cache. diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 88f843c03b1d5a..7884308a333020 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -1603,6 +1603,39 @@ the following flags: for any introspection needs. +Buffer flags +------------ + +.. class:: BufferFlags + + This is an :class:`enum.IntFlag` that represents the flags that + can be passed to the :meth:`~object.__buffer__` method of objects + implementing the :ref:`buffer protocol `. + + The meaning of the flags is explained at :ref:`buffer-request-types`. + + .. attribute:: BufferFlags.SIMPLE + .. attribute:: BufferFlags.WRITABLE + .. attribute:: BufferFlags.FORMAT + .. attribute:: BufferFlags.ND + .. attribute:: BufferFlags.STRIDES + .. attribute:: BufferFlags.C_CONTIGUOUS + .. attribute:: BufferFlags.F_CONTIGUOUS + .. attribute:: BufferFlags.ANY_CONTIGUOUS + .. attribute:: BufferFlags.INDIRECT + .. attribute:: BufferFlags.CONTIG + .. attribute:: BufferFlags.CONTIG_RO + .. attribute:: BufferFlags.STRIDED + .. attribute:: BufferFlags.STRIDED_RO + .. attribute:: BufferFlags.RECORDS + .. attribute:: BufferFlags.RECORDS_RO + .. attribute:: BufferFlags.FULL + .. attribute:: BufferFlags.FULL_RO + .. attribute:: BufferFlags.READ + .. attribute:: BufferFlags.WRITE + + .. versionadded:: 3.12 + .. _inspect-module-cli: Command Line Interface diff --git a/Doc/library/internet.rst b/Doc/library/internet.rst index ff58dcf4d89c36..681769a4820dba 100644 --- a/Doc/library/internet.rst +++ b/Doc/library/internet.rst @@ -9,7 +9,7 @@ Internet Protocols and Support single: Internet single: World Wide Web -.. index:: module: socket +.. 
index:: pair: module; socket The modules described in this chapter implement internet protocols and support for related technology. They are all implemented in Python. Most of these diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst index f726f8397c9648..f2abb3638a141f 100644 --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -16,7 +16,7 @@ functionality. The POSIX locale mechanism allows programmers to deal with certain cultural issues in an application, without requiring the programmer to know all the specifics of each country where the software is executed. -.. index:: module: _locale +.. index:: pair: module; _locale The :mod:`locale` module is implemented on top of the :mod:`_locale` module, which in turn uses an ANSI C locale implementation if available. @@ -464,7 +464,7 @@ The :mod:`locale` module defines the following exception and functions: .. data:: LC_CTYPE - .. index:: module: string + .. index:: pair: module; string Locale category for the character type functions. Depending on the settings of this category, the functions of module :mod:`string` dealing with case change diff --git a/Doc/library/marshal.rst b/Doc/library/marshal.rst index 24f9dc1689da4a..0556f19699dc15 100644 --- a/Doc/library/marshal.rst +++ b/Doc/library/marshal.rst @@ -15,8 +15,8 @@ undocumented on purpose; it may change between Python versions (although it rarely does). [#]_ .. index:: - module: pickle - module: shelve + pair: module; pickle + pair: module; shelve This is not a general "persistence" module. For general persistence and transfer of Python objects through RPC calls, see the modules :mod:`pickle` and diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 96bcb48ad7d126..7881c52db87090 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -159,7 +159,7 @@ the :mod:`glob` module.) On Unix and Windows, return the argument with an initial component of ``~`` or ``~user`` replaced by that *user*'s home directory. - .. index:: module: pwd + .. index:: pair: module; pwd On Unix, an initial ``~`` is replaced by the environment variable :envvar:`HOME` if it is set; otherwise the current user's home directory is looked up in the diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 50e951c631fa88..641e289e77c518 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1284,7 +1284,7 @@ or `the MSDN `_ on Windo .. function:: openpty() - .. index:: module: pty + .. index:: pair: module; pty Open a new pseudo-terminal pair. Return a pair of file descriptors ``(master, slave)`` for the pty and the tty, respectively. The new file @@ -2890,7 +2890,7 @@ features: possible and call :func:`lstat` on the result. This does not apply to dangling symlinks or junction points, which will raise the usual exceptions. - .. index:: module: stat + .. index:: pair: module; stat Example:: @@ -4593,7 +4593,7 @@ written in Python, such as a mail server's external command delivery program. :attr:`!children_system`, and :attr:`!elapsed` in that order. See the Unix manual page - :manpage:`times(2)` and `times(3) `_ manual page on Unix or `the GetProcessTimes MSDN + :manpage:`times(2)` and `times(3) `_ manual page on Unix or `the GetProcessTimes MSDN `_ on Windows. On Windows, only :attr:`!user` and :attr:`!system` are known; the other attributes are zero. 
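Since the surrounding text enumerates the fields of the :func:`os.times` result object, a small usage sketch (the printed values are machine-dependent)::

    import os

    t = os.times()
    # Fields in the documented order: user, system, children_user,
    # children_system, elapsed (on Windows only user and system are filled in).
    print(t.user, t.system, t.elapsed)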
diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 8e91936680fab8..5ffa33d4e61f19 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -530,10 +530,10 @@ Pure paths provide the following methods and properties: unintended effects. -.. method:: PurePath.joinpath(*other) +.. method:: PurePath.joinpath(*pathsegments) Calling this method is equivalent to combining the path with each of - the *other* arguments in turn:: + the given *pathsegments* in turn:: >>> PurePosixPath('/etc').joinpath('passwd') PurePosixPath('/etc/passwd') @@ -680,6 +680,30 @@ Pure paths provide the following methods and properties: PureWindowsPath('README') +.. method:: PurePath.with_segments(*pathsegments) + + Create a new path object of the same type by combining the given + *pathsegments*. This method is called whenever a derivative path is created, + such as from :attr:`parent` and :meth:`relative_to`. Subclasses may + override this method to pass information to derivative paths, for example:: + + from pathlib import PurePosixPath + + class MyPath(PurePosixPath): + def __init__(self, *pathsegments, session_id): + super().__init__(*pathsegments) + self.session_id = session_id + + def with_segments(self, *pathsegments): + return type(self)(*pathsegments, session_id=self.session_id) + + etc = MyPath('/etc', session_id=42) + hosts = etc / 'hosts' + print(hosts.session_id) # 42 + + .. versionadded:: 3.12 + + .. _concrete-paths: @@ -819,9 +843,14 @@ call fails (for example because the path doesn't exist). .. versionchanged:: 3.10 The *follow_symlinks* parameter was added. -.. method:: Path.exists() +.. method:: Path.exists(*, follow_symlinks=True) + + Return ``True`` if the path points to an existing file or directory. + + This method normally follows symlinks; to check if a symlink exists, add + the argument ``follow_symlinks=False``. - Whether the path points to an existing file or directory:: + :: >>> Path('.').exists() True @@ -832,10 +861,8 @@ call fails (for example because the path doesn't exist). >>> Path('nonexistentfile').exists() False - .. note:: - If the path points to a symlink, :meth:`exists` returns whether the - symlink *points to* an existing file or directory. - + .. versionchanged:: 3.12 + The *follow_symlinks* parameter was added. .. method:: Path.expanduser() @@ -852,7 +879,7 @@ call fails (for example because the path doesn't exist). .. versionadded:: 3.5 -.. method:: Path.glob(pattern) +.. method:: Path.glob(pattern, *, case_sensitive=None) Glob the given relative *pattern* in the directory represented by this path, yielding all matching files (of any kind):: @@ -873,6 +900,11 @@ call fails (for example because the path doesn't exist). PosixPath('setup.py'), PosixPath('test_pathlib.py')] + By default, or when the *case_sensitive* keyword-only argument is set to + ``None``, this method matches paths using platform-specific casing rules: + typically, case-sensitive on POSIX, and case-insensitive on Windows. + Set *case_sensitive* to ``True`` or ``False`` to override this behaviour. + .. note:: Using the "``**``" pattern in large directory trees may consume an inordinate amount of time. @@ -883,6 +915,9 @@ call fails (for example because the path doesn't exist). Return only directories if *pattern* ends with a pathname components separator (:data:`~os.sep` or :data:`~os.altsep`). + .. versionadded:: 3.12 + The *case_sensitive* argument. + .. method:: Path.group() Return the name of the group owning the file. 
:exc:`KeyError` is raised @@ -1268,7 +1303,7 @@ call fails (for example because the path doesn't exist). .. versionadded:: 3.6 The *strict* argument (pre-3.6 behavior is strict). -.. method:: Path.rglob(pattern) +.. method:: Path.rglob(pattern, *, case_sensitive=None) Glob the given relative *pattern* recursively. This is like calling :func:`Path.glob` with "``**/``" added in front of the *pattern*, where @@ -1281,12 +1316,20 @@ call fails (for example because the path doesn't exist). PosixPath('setup.py'), PosixPath('test_pathlib.py')] + By default, or when the *case_sensitive* keyword-only argument is set to + ``None``, this method matches paths using platform-specific casing rules: + typically, case-sensitive on POSIX, and case-insensitive on Windows. + Set *case_sensitive* to ``True`` or ``False`` to override this behaviour. + .. audit-event:: pathlib.Path.rglob self,pattern pathlib.Path.rglob .. versionchanged:: 3.11 Return only directories if *pattern* ends with a pathname components separator (:data:`~os.sep` or :data:`~os.altsep`). + .. versionadded:: 3.12 + The *case_sensitive* argument. + .. method:: Path.rmdir() Remove this directory. The directory must be empty. diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst index 5bc48a6d5f77fd..74bffef5562ae1 100644 --- a/Doc/library/pdb.rst +++ b/Doc/library/pdb.rst @@ -20,8 +20,8 @@ supports post-mortem debugging and can be called under program control. .. index:: single: Pdb (class in pdb) - module: bdb - module: cmd + pair: module; bdb + pair: module; cmd The debugger is extensible -- it is actually defined as the class :class:`Pdb`. This is currently undocumented but easily understood by reading the source. The @@ -263,6 +263,21 @@ the commands; the input is split at the first ``;;`` pair, even if it is in the middle of a quoted string. A workaround for strings with double semicolons is to use implicit string concatenation ``';'';'`` or ``";"";"``. +To set a temporary global variable, use a *convenience variable*. A *convenience +variable* is a variable whose name starts with ``$``. For example, ``$foo = 1`` +sets a global variable ``$foo`` which you can use in the debugger session. The +*convenience variables* are cleared when the program resumes execution so it's +less likely to interfere with your program compared to using normal variables +like ``foo = 1``. + +There are three preset *convenience variables*: + +* ``$_frame``: the current frame you are debugging +* ``$_retval``: the return value if the frame is returning +* ``$_exception``: the exception if the frame is raising an exception + +.. versionadded:: 3.12 + .. index:: pair: .pdbrc; file triple: debugger; configuration; file diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst index 64e617b82b48bc..891a867d1ceb68 100644 --- a/Doc/library/pkgutil.rst +++ b/Doc/library/pkgutil.rst @@ -48,33 +48,6 @@ support. this function to raise an exception (in line with :func:`os.path.isdir` behavior). - -.. class:: ImpImporter(dirname=None) - - :pep:`302` Finder that wraps Python's "classic" import algorithm. - - If *dirname* is a string, a :pep:`302` finder is created that searches that - directory. If *dirname* is ``None``, a :pep:`302` finder is created that - searches the current :data:`sys.path`, plus any modules that are frozen or - built-in. - - Note that :class:`ImpImporter` does not currently support being used by - placement on :data:`sys.meta_path`. - - .. 
deprecated:: 3.3 - This emulation is no longer needed, as the standard import mechanism - is now fully :pep:`302` compliant and available in :mod:`importlib`. - - -.. class:: ImpLoader(fullname, file, filename, etc) - - :term:`Loader ` that wraps Python's "classic" import algorithm. - - .. deprecated:: 3.3 - This emulation is no longer needed, as the standard import mechanism - is now fully :pep:`302` compliant and available in :mod:`importlib`. - - .. function:: find_loader(fullname) Retrieve a module :term:`loader` for the given *fullname*. @@ -91,6 +64,10 @@ support. .. versionchanged:: 3.4 Updated to be based on :pep:`451` + .. deprecated-removed:: 3.12 3.14 + Use :func:`importlib.util.find_spec` instead. + + .. function:: get_importer(path_item) Retrieve a :term:`finder` for the given *path_item*. @@ -123,6 +100,9 @@ support. .. versionchanged:: 3.4 Updated to be based on :pep:`451` + .. deprecated-removed:: 3.12 3.14 + Use :func:`importlib.util.find_spec` instead. + .. function:: iter_importers(fullname='') diff --git a/Doc/library/plistlib.rst b/Doc/library/plistlib.rst index 7aad15ec91a0ac..732ef3536863cc 100644 --- a/Doc/library/plistlib.rst +++ b/Doc/library/plistlib.rst @@ -46,7 +46,7 @@ or :class:`datetime.datetime` objects. .. seealso:: - `PList manual page `_ + `PList manual page `_ Apple's documentation of the file format. diff --git a/Doc/library/posix.rst b/Doc/library/posix.rst index ec04b0dcfc162f..0413f9d02a8d57 100644 --- a/Doc/library/posix.rst +++ b/Doc/library/posix.rst @@ -11,7 +11,7 @@ This module provides access to operating system functionality that is standardized by the C Standard and the POSIX standard (a thinly disguised Unix interface). -.. index:: module: os +.. index:: pair: module; os **Do not import this module directly.** Instead, import the module :mod:`os`, which provides a *portable* version of this interface. On Unix, the :mod:`os` diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst index 4e29192311fc21..d8269ef48cb36a 100644 --- a/Doc/library/pprint.rst +++ b/Doc/library/pprint.rst @@ -159,7 +159,7 @@ The :mod:`pprint` module defines one class: .. function:: isreadable(object) - .. index:: builtin: eval + .. index:: pair: built-in function; eval Determine if the formatted representation of *object* is "readable", or can be used to reconstruct the value using :func:`eval`. This always returns ``False`` @@ -218,7 +218,7 @@ created. .. method:: PrettyPrinter.isreadable(object) - .. index:: builtin: eval + .. index:: pair: built-in function; eval Determine if the formatted representation of the object is "readable," or can be used to reconstruct the value using :func:`eval`. Note that this returns diff --git a/Doc/library/pwd.rst b/Doc/library/pwd.rst index 98f3c45e29cbcb..7cafc66fd7e93c 100644 --- a/Doc/library/pwd.rst +++ b/Doc/library/pwd.rst @@ -39,7 +39,7 @@ raised if the entry asked for cannot be found. .. note:: - .. index:: module: crypt + .. index:: pair: module; crypt In traditional Unix the field ``pw_passwd`` usually contains a password encrypted with a DES derived algorithm (see module :mod:`crypt`). However most diff --git a/Doc/library/pyexpat.rst b/Doc/library/pyexpat.rst index d6581e21b01c0e..935e872480efda 100644 --- a/Doc/library/pyexpat.rst +++ b/Doc/library/pyexpat.rst @@ -33,7 +33,7 @@ can be set to handler functions. When an XML document is then fed to the parser, the handler functions are called for the character data and markup in the XML document. -.. index:: module: pyexpat +.. 
index:: pair: module; pyexpat This module uses the :mod:`pyexpat` module to provide access to the Expat parser. Direct use of the :mod:`pyexpat` module is deprecated. diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst index e7bf45d7d569fa..a5324c82c63484 100644 --- a/Doc/library/resource.rst +++ b/Doc/library/resource.rst @@ -244,7 +244,7 @@ platform. used by all of this user id's processes. This limit is enforced only if bit 1 of the vm.overcommit sysctl is set. Please see - `tuning(7) `__ + `tuning(7) `__ for a complete description of this sysctl. .. availability:: FreeBSD. diff --git a/Doc/library/runpy.rst b/Doc/library/runpy.rst index 501f4ddf5a3e3f..42ed8c253b8027 100644 --- a/Doc/library/runpy.rst +++ b/Doc/library/runpy.rst @@ -30,7 +30,7 @@ The :mod:`runpy` module provides two functions: .. function:: run_module(mod_name, init_globals=None, run_name=None, alter_sys=False) .. index:: - module: __main__ + pair: module; __main__ Execute the code of the specified module and return the resulting module globals dictionary. The module's code is first located using the standard @@ -101,7 +101,7 @@ The :mod:`runpy` module provides two functions: .. function:: run_path(path_name, init_globals=None, run_name=None) .. index:: - module: __main__ + pair: module; __main__ Execute the code at the named filesystem location and return the resulting module globals dictionary. As with a script name supplied to the CPython diff --git a/Doc/library/select.rst b/Doc/library/select.rst index 2890706bab729c..b0891b0c8f584a 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -505,7 +505,7 @@ Kqueue Objects Kevent Objects -------------- -https://www.freebsd.org/cgi/man.cgi?query=kqueue&sektion=2 +https://man.freebsd.org/cgi/man.cgi?query=kqueue&sektion=2 .. attribute:: kevent.ident diff --git a/Doc/library/shelve.rst b/Doc/library/shelve.rst index a50fc6f0bf77b2..dc87af398ed757 100644 --- a/Doc/library/shelve.rst +++ b/Doc/library/shelve.rst @@ -6,7 +6,7 @@ **Source code:** :source:`Lib/shelve.py` -.. index:: module: pickle +.. index:: pair: module; pickle -------------- @@ -95,8 +95,8 @@ Restrictions ------------ .. index:: - module: dbm.ndbm - module: dbm.gnu + pair: module; dbm.ndbm + pair: module; dbm.gnu * The choice of which database package will be used (such as :mod:`dbm.ndbm` or :mod:`dbm.gnu`) depends on which interface is available. Therefore it is not diff --git a/Doc/library/site.rst b/Doc/library/site.rst index 4a88013f1d6ed2..44f90a3b9e496f 100644 --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -51,7 +51,7 @@ searched for site-packages; otherwise they will. .. index:: single: # (hash); comment - statement: import + pair: statement; import A path configuration file is a file whose name has the form :file:`{name}.pth` and exists in one of the four directories mentioned above; its contents are @@ -109,7 +109,7 @@ directory precedes the :file:`foo` directory because :file:`bar.pth` comes alphabetically before :file:`foo.pth`; and :file:`spam` is omitted because it is not mentioned in either path configuration file. -.. index:: module: sitecustomize +.. index:: pair: module; sitecustomize After these path manipulations, an attempt is made to import a module named :mod:`sitecustomize`, which can perform arbitrary site-specific customizations. @@ -121,7 +121,7 @@ with :file:`pythonw.exe` on Windows (which is used by default to start IDLE), attempted output from :mod:`sitecustomize` is ignored. 
Any other exception causes a silent and perhaps mysterious failure of the process. -.. index:: module: usercustomize +.. index:: pair: module; usercustomize After this, an attempt is made to import a module named :mod:`usercustomize`, which can perform arbitrary user-specific customizations, if diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index c8ca555700a3c9..13a82cf82d5908 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -19,7 +19,7 @@ all modern Unix systems, Windows, MacOS, and probably additional platforms. .. include:: ../includes/wasm-notavail.rst -.. index:: object: socket +.. index:: pair: object; socket The Python interface is a straightforward transliteration of the Unix system call and library interface for sockets to Python's object-oriented style: the @@ -509,6 +509,17 @@ Constants .. versionadded:: 3.9 +.. data:: AF_DIVERT + PF_DIVERT + + These two constants, documented in the FreeBSD divert(4) manual page, are + also defined in the socket module. + + .. availability:: FreeBSD >= 14.0. + + .. versionadded:: 3.12 + + .. data:: AF_PACKET PF_PACKET PACKET_* @@ -1916,7 +1927,7 @@ to sockets. .. method:: socket.setsockopt(level, optname, None, optlen: int) :noindex: - .. index:: module: struct + .. index:: pair: module; struct Set the value of the given socket option (see the Unix manual page :manpage:`setsockopt(2)`). The needed symbolic constants are defined in the diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 4b60b7c643b62c..18a6c5ab4858a4 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -1719,7 +1719,7 @@ to speed up repeated connections from the same clients. .. versionadded:: 3.3 .. seealso:: - `SSL/TLS & Perfect Forward Secrecy `_ + `SSL/TLS & Perfect Forward Secrecy `_ Vincent Bernat. .. method:: SSLContext.wrap_socket(sock, server_side=False, \ diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index f934b0e0319dca..395b324c860389 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -22,7 +22,7 @@ This module provides functions for calculating mathematical statistics of numeric (:class:`~numbers.Real`-valued) data. The module is not intended to be a competitor to third-party libraries such -as `NumPy `_, `SciPy `_, or +as `NumPy `_, `SciPy `_, or proprietary full-featured statistics packages aimed at professional statisticians such as Minitab, SAS and Matlab. It is aimed at the level of graphing and scientific calculators. diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 2360472b31f175..9203afbf6a4e8a 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -32,8 +32,8 @@ Truth Value Testing =================== .. index:: - statement: if - statement: while + pair: statement; if + pair: statement; while pair: truth; value pair: Boolean; operations single: false @@ -61,8 +61,8 @@ objects considered false: ``range(0)`` .. index:: - operator: or - operator: and + pair: operator; or + pair: operator; and single: False single: True @@ -95,9 +95,9 @@ These are the Boolean operations, ordered by ascending priority: +-------------+---------------------------------+-------+ .. index:: - operator: and - operator: or - operator: not + pair: operator; and + pair: operator; or + pair: operator; not Notes: @@ -122,14 +122,14 @@ Comparisons .. 
index:: pair: chaining; comparisons pair: operator; comparison - operator: == - operator: < (less) - operator: <= - operator: > (greater) - operator: >= - operator: != - operator: is - operator: is not + pair: operator; == + pair: operator; < (less) + pair: operator; <= + pair: operator; > (greater) + pair: operator; >= + pair: operator; != + pair: operator; is + pair: operator; is not There are eight comparison operations in Python. They all have the same priority (which is higher than that of the Boolean operations). Comparisons can @@ -192,8 +192,8 @@ customized; also they can be applied to any two objects and never raise an exception. .. index:: - operator: in - operator: not in + pair: operator; in + pair: operator; not in Two more operations with the same syntactic priority, :keyword:`in` and :keyword:`not in`, are supported by types that are :term:`iterable` or @@ -205,11 +205,11 @@ Numeric Types --- :class:`int`, :class:`float`, :class:`complex` ================================================================ .. index:: - object: numeric - object: Boolean - object: integer - object: floating point - object: complex number + pair: object; numeric + pair: object; Boolean + pair: object; integer + pair: object; floating point + pair: object; complex number pair: C; language There are three distinct numeric types: :dfn:`integers`, :dfn:`floating @@ -244,20 +244,20 @@ and imaginary parts. .. index:: single: arithmetic - builtin: int - builtin: float - builtin: complex + pair: built-in function; int + pair: built-in function; float + pair: built-in function; complex single: operator; + (plus) single: + (plus); unary operator single: + (plus); binary operator single: operator; - (minus) single: - (minus); unary operator single: - (minus); binary operator - operator: * (asterisk) - operator: / (slash) - operator: // - operator: % (percent) - operator: ** + pair: operator; * (asterisk) + pair: operator; / (slash) + pair: operator; // + pair: operator; % (percent) + pair: operator; ** Python fully supports mixed arithmetic: when a binary arithmetic operator has operands of different numeric types, the operand with the "narrower" type is @@ -330,7 +330,7 @@ Notes: (3) .. index:: - module: math + pair: module; math single: floor() (in module math) single: ceil() (in module math) single: trunc() (in module math) @@ -392,12 +392,12 @@ Bitwise Operations on Integer Types pair: bitwise; operations pair: shifting; operations pair: masking; operations - operator: | (vertical bar) - operator: ^ (caret) - operator: & (ampersand) - operator: << - operator: >> - operator: ~ (tilde) + pair: operator; | (vertical bar) + pair: operator; ^ (caret) + pair: operator; & (ampersand) + pair: operator; << + pair: operator; >> + pair: operator; ~ (tilde) Bitwise operations only make sense for integers. The result of bitwise operations is calculated as though carried out in two's complement with an @@ -802,6 +802,39 @@ number, :class:`float`, or :class:`complex`:: hash_value = -2 return hash_value +.. _typebool: + +Boolean Type - :class:`bool` +============================ + +Booleans represent truth values. The :class:`bool` type has exactly two +constant instances: ``True`` and ``False``. + +.. index:: + single: False + single: True + pair: Boolean; values + +The built-in function :func:`bool` converts any value to a boolean, if the +value can be interpreted as a truth value (see section :ref:`truth` above). + +For logical operations, use the :ref:`boolean operators ` ``and``, +``or`` and ``not``. 
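A brief interactive illustration of the conversion and the boolean operators just described (a sketch; the results are standard CPython behaviour)::

    >>> bool(0), bool(0.0), bool(''), bool('text')
    (False, False, False, True)
    >>> not True, True and False, True or False
    (False, False, True)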
+When applying the bitwise operators ``&``, ``|``, ``^`` to two booleans, they +return a bool equivalent to the logical operations "and", "or", "xor". However, +the logical operators ``and``, ``or`` and ``!=`` should be preferred +over ``&``, ``|`` and ``^``. + +.. deprecated:: 3.12 + + The use of the bitwise inversion operator ``~`` is deprecated and will + raise an error in Python 3.14. + +:class:`bool` is a subclass of :class:`int` (see :ref:`typesnumeric`). In +many numeric contexts, ``False`` and ``True`` behave like the integers 0 and 1, respectively. +However, relying on this is discouraged; explicitly convert using :func:`int` +instead. + .. _typeiter: Iterator Types @@ -894,7 +927,7 @@ described in dedicated sections. Common Sequence Operations -------------------------- -.. index:: object: sequence +.. index:: pair: object; sequence The operations in the following table are supported by most sequence types, both mutable and immutable. The :class:`collections.abc.Sequence` ABC is @@ -912,15 +945,15 @@ operations have the same priority as the corresponding numeric operations. [3]_ .. index:: triple: operations on; sequence; types - builtin: len - builtin: min - builtin: max + pair: built-in function; len + pair: built-in function; min + pair: built-in function; max pair: concatenation; operation pair: repetition; operation pair: subscript; operation pair: slice; operation - operator: in - operator: not in + pair: operator; in + pair: operator; not in single: count() (sequence method) single: index() (sequence method) @@ -1079,8 +1112,8 @@ Immutable Sequence Types .. index:: triple: immutable; sequence; types - object: tuple - builtin: hash + pair: object; tuple + pair: built-in function; hash The only operation that immutable sequence types generally implement that is not also implemented by mutable sequence types is support for the :func:`hash` @@ -1101,8 +1134,8 @@ Mutable Sequence Types .. index:: triple: mutable; sequence; types - object: list - object: bytearray + pair: object; list + pair: object; bytearray The operations in the following table are defined on mutable sequence types. The :class:`collections.abc.MutableSequence` ABC is provided to make it @@ -1119,7 +1152,7 @@ accepts integers that meet the value restriction ``0 <= x <= 255``). triple: operations on; list; type pair: subscript; assignment pair: slice; assignment - statement: del + pair: statement; del single: append() (sequence method) single: clear() (sequence method) single: copy() (sequence method) @@ -1219,7 +1252,7 @@ Notes: Lists ----- -.. index:: object: list +.. index:: pair: object; list Lists are mutable sequences, typically used to store collections of homogeneous items (where the precise degree of similarity will vary by @@ -1298,7 +1331,7 @@ application). Tuples ------ -.. index:: object: tuple +.. index:: pair: object; tuple Tuples are immutable sequences, typically used to store collections of heterogeneous data (such as the 2-tuples produced by the :func:`enumerate` @@ -1342,7 +1375,7 @@ choice than a simple tuple object. Ranges ------ -.. index:: object: range +.. index:: pair: object; range The :class:`range` type represents an immutable sequence of numbers and is commonly used for looping a specific number of times in :keyword:`for` @@ -1467,7 +1500,7 @@ objects that compare equal might have different :attr:`~range.start`, .. index:: single: string; text sequence type single: str (built-in class); (see also string) - object: string + pair: object; string .. 
_textseq: @@ -1501,7 +1534,7 @@ Since there is no separate "character" type, indexing a string produces strings of length 1. That is, for a non-empty string *s*, ``s[0] == s[0:1]``. .. index:: - object: io.StringIO + pair: object; io.StringIO There is also no mutable string type, but :meth:`str.join` or :class:`io.StringIO` can be used to efficiently construct strings from @@ -1567,7 +1600,7 @@ String Methods -------------- .. index:: - module: re + pair: module; re Strings implement all of the :ref:`common ` sequence operations, along with the additional methods described below. @@ -2475,10 +2508,10 @@ Binary Sequence Types --- :class:`bytes`, :class:`bytearray`, :class:`memoryview ================================================================================= .. index:: - object: bytes - object: bytearray - object: memoryview - module: array + pair: object; bytes + pair: object; bytearray + pair: object; memoryview + pair: module; array The core built-in types for manipulating binary data are :class:`bytes` and :class:`bytearray`. They are supported by :class:`memoryview` which uses @@ -2493,7 +2526,7 @@ The :mod:`array` module supports efficient storage of basic data types like Bytes Objects ------------- -.. index:: object: bytes +.. index:: pair: object; bytes Bytes objects are immutable sequences of single bytes. Since many major binary protocols are based on the ASCII text encoding, bytes objects offer @@ -2600,7 +2633,7 @@ always convert a bytes object into a list of integers using ``list(b)``. Bytearray Objects ----------------- -.. index:: object: bytearray +.. index:: pair: object; bytearray :class:`bytearray` objects are a mutable counterpart to :class:`bytes` objects. @@ -4179,7 +4212,7 @@ copying. Set Types --- :class:`set`, :class:`frozenset` ============================================== -.. index:: object: set +.. index:: pair: object; set A :dfn:`set` object is an unordered collection of distinct :term:`hashable` objects. Common uses include membership testing, removing duplicates from a sequence, and @@ -4381,12 +4414,12 @@ Mapping Types --- :class:`dict` =============================== .. index:: - object: mapping - object: dictionary + pair: object; mapping + pair: object; dictionary triple: operations on; mapping; types triple: operations on; dictionary; type - statement: del - builtin: len + pair: statement; del + pair: built-in function; len A :term:`mapping` object maps :term:`hashable` values to arbitrary objects. Mappings are mutable objects. There is currently only one standard mapping @@ -4856,7 +4889,7 @@ Generic Alias Type ------------------ .. index:: - object: GenericAlias + pair: object; GenericAlias pair: Generic; Alias ``GenericAlias`` objects are generally created by @@ -5111,7 +5144,7 @@ Union Type ---------- .. index:: - object: Union + pair: object; Union pair: union; type A union object holds the value of the ``|`` (bitwise or) operation on @@ -5268,7 +5301,7 @@ See :ref:`function` for more information. Methods ------- -.. index:: object: method +.. index:: pair: object; method Methods are functions that are called using the attribute notation. There are two flavors: built-in methods (such as :meth:`append` on lists) and class @@ -5315,7 +5348,7 @@ Code Objects ------------ .. 
index:: - builtin: compile + pair: built-in function; compile single: __code__ (function object attribute) Code objects are used by the implementation to represent "pseudo-compiled" @@ -5329,8 +5362,8 @@ Accessing ``__code__`` raises an :ref:`auditing event ` ``object.__getattr__`` with arguments ``obj`` and ``"__code__"``. .. index:: - builtin: exec - builtin: eval + pair: built-in function; exec + pair: built-in function; eval A code object can be executed or evaluated by passing it (instead of a source string) to the :func:`exec` or :func:`eval` built-in functions. @@ -5344,8 +5377,8 @@ Type Objects ------------ .. index:: - builtin: type - module: types + pair: built-in function; type + pair: module; types Type objects represent the various object types. An object's type is accessed by the built-in function :func:`type`. There are no special operations on @@ -5394,27 +5427,6 @@ information. There is exactly one ``NotImplemented`` object. It is written as ``NotImplemented``. -.. _bltin-boolean-values: - -Boolean Values --------------- - -Boolean values are the two constant objects ``False`` and ``True``. They are -used to represent truth values (although other values can also be considered -false or true). In numeric contexts (for example when used as the argument to -an arithmetic operator), they behave like the integers 0 and 1, respectively. -The built-in function :func:`bool` can be used to convert any value to a -Boolean, if the value can be interpreted as a truth value (see section -:ref:`truth` above). - -.. index:: - single: False - single: True - pair: Boolean; values - -They are written as ``False`` and ``True``, respectively. - - .. _typesinternal: Internal Objects diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst index 9c0e32ba16bf68..78fd6e397ae635 100644 --- a/Doc/library/struct.rst +++ b/Doc/library/struct.rst @@ -602,4 +602,4 @@ The :mod:`struct` module also defines the following type: .. _ieee 754 standard: https://en.wikipedia.org/wiki/IEEE_754-2008_revision -.. _IETF RFC 1700: https://tools.ietf.org/html/rfc1700 +.. _IETF RFC 1700: https://datatracker.ietf.org/doc/html/rfc1700 diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 7c0e85142e7716..bacf8ceac5041e 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -444,7 +444,7 @@ always available. object ` which typically encapsulates the call stack at the point where the exception last occurred. - .. index:: object: traceback + .. index:: pair: object; traceback If no exception is being handled anywhere on the stack, this function return a tuple containing three ``None`` values. @@ -792,7 +792,7 @@ always available. additional garbage collector overhead if the object is managed by the garbage collector. - See `recursive sizeof recipe `_ + See `recursive sizeof recipe `_ for an example of using :func:`getsizeof` recursively to find the size of containers and all their contents. @@ -1177,7 +1177,7 @@ always available. :term:`Module specs ` were introduced in Python 3.4, by :pep:`451`. Earlier versions of Python looked for a method called - :meth:`~importlib.abc.MetaPathFinder.find_module`. + :meth:`!find_module`. This is still called as a fallback if a :data:`meta_path` entry doesn't have a :meth:`~importlib.abc.MetaPathFinder.find_spec` method. diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst index 561c85290463ef..5c0e261b90763c 100644 --- a/Doc/library/traceback.rst +++ b/Doc/library/traceback.rst @@ -14,7 +14,7 @@ interpreter when it prints a stack trace. 
This is useful when you want to print stack traces under program control, such as in a "wrapper" around the interpreter. -.. index:: object: traceback +.. index:: pair: object; traceback The module uses traceback objects --- these are objects of type :class:`types.TracebackType`, which are assigned to the ``__traceback__`` field of :class:`BaseException` instances. diff --git a/Doc/library/types.rst b/Doc/library/types.rst index a15fb5cfa49473..8cbe17df16f107 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -186,7 +186,7 @@ Standard names are defined for the following types: .. class:: CodeType(**kwargs) - .. index:: builtin: compile + .. index:: pair: built-in function; compile The type for code objects such as returned by :func:`compile`. diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 409a95d528b5d3..ebab1389f07e58 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -2130,17 +2130,11 @@ Corresponding to collections in :mod:`collections.abc` .. class:: ByteString(Sequence[int]) - A generic version of :class:`collections.abc.ByteString`. - This type represents the types :class:`bytes`, :class:`bytearray`, and :class:`memoryview` of byte sequences. - As a shorthand for this type, :class:`bytes` can be used to - annotate arguments of any of the types mentioned above. - - .. deprecated:: 3.9 - :class:`collections.abc.ByteString` now supports subscripting (``[]``). - See :pep:`585` and :ref:`types-genericalias`. + .. deprecated-removed:: 3.9 3.14 + Prefer :class:`collections.abc.Buffer`, or a union like ``bytes | bytearray | memoryview``. .. class:: Collection(Sized, Iterable[T_co], Container[T_co]) @@ -2484,15 +2478,16 @@ Functions and decorators Ask a static type checker to confirm that *val* has an inferred type of *typ*. - When the type checker encounters a call to ``assert_type()``, it + At runtime this does nothing: it returns the first argument unchanged with no + checks or side effects, no matter the actual type of the argument. + + When a static type checker encounters a call to ``assert_type()``, it emits an error if the value is not of the specified type:: def greet(name: str) -> None: assert_type(name, str) # OK, inferred type of `name` is `str` assert_type(name, int) # type checker error - At runtime this returns the first argument unchanged with no side effects. - This function is useful for ensuring the type checker's understanding of a script is in line with the developer's intentions:: @@ -2977,6 +2972,8 @@ convenience. This is subject to change, and not all deprecations are listed. | ``typing`` versions of standard | 3.9 | Undecided | :pep:`585` | | collections | | | | +----------------------------------+---------------+-------------------+----------------+ +| ``typing.ByteString`` | 3.9 | 3.14 | :gh:`91896` | ++----------------------------------+---------------+-------------------+----------------+ | ``typing.Text`` | 3.11 | Undecided | :gh:`92332` | +----------------------------------+---------------+-------------------+----------------+ | ``typing.Hashable`` and | 3.12 | Undecided | :gh:`94309` | diff --git a/Doc/library/unittest.mock-examples.rst b/Doc/library/unittest.mock-examples.rst index f9a207bad6903f..895b9f9f07671b 100644 --- a/Doc/library/unittest.mock-examples.rst +++ b/Doc/library/unittest.mock-examples.rst @@ -1074,7 +1074,7 @@ subclass. Sometimes this is inconvenient. For example, `one user `_ is subclassing mock to created a `Twisted adaptor -`_. +`_. 
Having this applied to attributes too actually causes errors. ``Mock`` (in all its flavours) uses a method called ``_get_child_mock`` to create diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index c70153dfcd69e1..b26e6c0e6bc024 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -72,7 +72,7 @@ test runner a GUI tool for test discovery and execution. This is intended largely for ease of use for those new to unit testing. For production environments it is recommended that tests be driven by a continuous integration system such as - `Buildbot `_, `Jenkins `_, + `Buildbot `_, `Jenkins `_, `GitHub Actions `_, or `AppVeyor `_. diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst index bd2c49a6edab7f..146c4fd768233b 100644 --- a/Doc/library/xmlrpc.client.rst +++ b/Doc/library/xmlrpc.client.rst @@ -161,7 +161,7 @@ between conformable Python objects and XML on the wire. .. seealso:: - `XML-RPC HOWTO `_ + `XML-RPC HOWTO `_ A good description of XML-RPC operation and client software in several languages. Contains pretty much everything an XML-RPC client developer needs to know. diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst index 6f4826cb065c64..45f3d340bd82d3 100644 --- a/Doc/library/zipfile.rst +++ b/Doc/library/zipfile.rst @@ -128,7 +128,7 @@ The module defines the following items: Documentation on the ZIP file format by Phil Katz, the creator of the format and algorithms used. - `Info-ZIP Home Page `_ + `Info-ZIP Home Page `_ Information about the Info-ZIP project's ZIP archive programs and development libraries. diff --git a/Doc/library/zipimport.rst b/Doc/library/zipimport.rst index fe1adcae163c23..11d19e8c863e9f 100644 --- a/Doc/library/zipimport.rst +++ b/Doc/library/zipimport.rst @@ -74,6 +74,11 @@ zipimporter Objects :exc:`ZipImportError` is raised if *archivepath* doesn't point to a valid ZIP archive. + .. versionchanged:: 3.12 + + Methods ``find_loader()`` and ``find_module()``, deprecated in 3.10 are + now removed. Use :meth:`find_spec` instead. + .. method:: create_module(spec) Implementation of :meth:`importlib.abc.Loader.create_module` that returns @@ -89,28 +94,6 @@ zipimporter Objects .. versionadded:: 3.10 - .. method:: find_loader(fullname, path=None) - - An implementation of :meth:`importlib.abc.PathEntryFinder.find_loader`. - - .. deprecated:: 3.10 - - Use :meth:`find_spec` instead. - - - .. method:: find_module(fullname, path=None) - - Search for a module specified by *fullname*. *fullname* must be the fully - qualified (dotted) module name. It returns the zipimporter instance itself - if the module was found, or :const:`None` if it wasn't. The optional - *path* argument is ignored---it's there for compatibility with the - importer protocol. - - .. deprecated:: 3.10 - - Use :meth:`find_spec` instead. - - .. method:: find_spec(fullname, target=None) An implementation of :meth:`importlib.abc.PathEntryFinder.find_spec`. diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index f0a8936c35bf4a..9d1e5b6c596d9f 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -84,9 +84,9 @@ The :keyword:`!if` statement ============================ .. index:: - ! statement: if - keyword: elif - keyword: else + ! pair: statement; if + pair: keyword; elif + pair: keyword; else single: : (colon); compound statement The :keyword:`if` statement is used for conditional execution: @@ -109,8 +109,8 @@ The :keyword:`!while` statement =============================== .. 
index:: - ! statement: while - keyword: else + ! pair: statement; while + pair: keyword; else pair: loop; statement single: : (colon); compound statement @@ -127,8 +127,8 @@ suite of the :keyword:`!else` clause, if present, is executed and the loop terminates. .. index:: - statement: break - statement: continue + pair: statement; break + pair: statement; continue A :keyword:`break` statement executed in the first suite terminates the loop without executing the :keyword:`!else` clause's suite. A :keyword:`continue` @@ -142,12 +142,12 @@ The :keyword:`!for` statement ============================= .. index:: - ! statement: for - keyword: in - keyword: else + ! pair: statement; for + pair: keyword; in + pair: keyword; else pair: target; list pair: loop; statement - object: sequence + pair: object; sequence single: : (colon); compound statement The :keyword:`for` statement is used to iterate over the elements of a sequence @@ -167,8 +167,8 @@ the suite in the :keyword:`!else` clause, if present, is executed, and the loop terminates. .. index:: - statement: break - statement: continue + pair: statement; break + pair: statement; continue A :keyword:`break` statement executed in the first suite terminates the loop without executing the :keyword:`!else` clause's suite. A :keyword:`continue` @@ -188,7 +188,7 @@ those made in the suite of the for-loop:: .. index:: - builtin: range + pair: built-in function; range Names in the target list are not deleted when the loop is finished, but if the sequence is empty, they will not have been assigned to at all by the loop. Hint: @@ -205,11 +205,11 @@ The :keyword:`!try` statement ============================= .. index:: - ! statement: try - keyword: except - keyword: finally - keyword: else - keyword: as + ! pair: statement; try + pair: keyword; except + pair: keyword; finally + pair: keyword; else + pair: keyword; as single: : (colon); compound statement The :keyword:`!try` statement specifies exception handlers and/or cleanup code @@ -297,8 +297,8 @@ traceback attached to them, they form a reference cycle with the stack frame, keeping all locals in that frame alive until the next garbage collection occurs. .. index:: - module: sys - object: traceback + pair: module; sys + pair: object; traceback Before an :keyword:`!except` clause's suite is executed, the exception is stored in the :mod:`sys` module, where it can be accessed @@ -326,7 +326,7 @@ stored in the :mod:`sys` module is reset to its previous value:: .. index:: - keyword: except_star + pair: keyword; except_star .. _except_star: @@ -362,8 +362,10 @@ one :keyword:`!except*` clause, the first that matches it. :: Any remaining exceptions that were not handled by any :keyword:`!except*` -clause are re-raised at the end, combined into an exception group along with -all exceptions that were raised from within :keyword:`!except*` clauses. +clause are re-raised at the end, along with all exceptions that were +raised from within the :keyword:`!except*` clauses. If this list contains +more than one exception to reraise, they are combined into an exception +group. If the raised exception is not an exception group and its type matches one of the :keyword:`!except*` clauses, it is caught and wrapped by an @@ -385,10 +387,10 @@ cannot appear in an :keyword:`!except*` clause. .. index:: - keyword: else - statement: return - statement: break - statement: continue + pair: keyword; else + pair: statement; return + pair: statement; break + pair: statement; continue .. 
_except_else: @@ -402,7 +404,7 @@ the :keyword:`!else` clause are not handled by the preceding :keyword:`except` clauses. -.. index:: keyword: finally +.. index:: pair: keyword; finally .. _finally: @@ -432,9 +434,9 @@ The exception information is not available to the program during execution of the :keyword:`!finally` clause. .. index:: - statement: return - statement: break - statement: continue + pair: statement; return + pair: statement; break + pair: statement; continue When a :keyword:`return`, :keyword:`break` or :keyword:`continue` statement is executed in the :keyword:`try` suite of a :keyword:`!try`...\ :keyword:`!finally` @@ -466,8 +468,8 @@ The :keyword:`!with` statement ============================== .. index:: - ! statement: with - keyword: as + ! pair: statement; with + pair: keyword; as single: as; with statement single: , (comma); with statement single: : (colon); compound statement @@ -583,11 +585,11 @@ The :keyword:`!match` statement =============================== .. index:: - ! statement: match - ! keyword: case + ! pair: statement; match + ! pair: keyword; case ! single: pattern matching - keyword: if - keyword: as + pair: keyword; if + pair: keyword; as pair: match; case single: as; match statement single: : (colon); compound statement @@ -1188,12 +1190,12 @@ Function definitions ==================== .. index:: - statement: def + pair: statement; def pair: function; definition pair: function; name pair: name; binding - object: user-defined function - object: function + pair: object; user-defined function + pair: object; function pair: function; name pair: name; binding single: () (parentheses); function definition @@ -1361,8 +1363,8 @@ Class definitions ================= .. index:: - object: class - statement: class + pair: object; class + pair: statement; class pair: class; definition pair: class; name pair: name; binding @@ -1461,7 +1463,7 @@ Coroutines .. versionadded:: 3.5 -.. index:: statement: async def +.. index:: pair: statement; async def .. _`async def`: Coroutine function definition @@ -1472,8 +1474,8 @@ Coroutine function definition : ["->" `expression`] ":" `suite` .. index:: - keyword: async - keyword: await + pair: keyword; async + pair: keyword; await Execution of Python coroutines can be suspended and resumed at many points (see :term:`coroutine`). :keyword:`await` expressions, :keyword:`async for` and @@ -1495,7 +1497,7 @@ An example of a coroutine function:: ``await`` and ``async`` are now keywords; previously they were only treated as such inside the body of a coroutine function. -.. index:: statement: async for +.. index:: pair: statement; async for .. _`async for`: The :keyword:`!async for` statement @@ -1540,7 +1542,7 @@ It is a :exc:`SyntaxError` to use an ``async for`` statement outside the body of a coroutine function. -.. index:: statement: async with +.. index:: pair: statement; async with .. _`async with`: The :keyword:`!async with` statement diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 55431f1951e50d..c0734e49f29192 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -21,8 +21,8 @@ conformance to Von Neumann's model of a "stored program computer", code is also represented by objects.) .. index:: - builtin: id - builtin: type + pair: built-in function; id + pair: built-in function; type single: identity of an object single: value of an object single: type of an object @@ -142,7 +142,7 @@ attributes.' 
These are attributes that provide access to the implementation and are not intended for general use. Their definition may change in the future. None - .. index:: object: None + .. index:: pair: object; None This type has a single value. There is a single object with this value. This object is accessed through the built-in name ``None``. It is used to signify the @@ -150,7 +150,7 @@ None don't explicitly return anything. Its truth value is false. NotImplemented - .. index:: object: NotImplemented + .. index:: pair: object; NotImplemented This type has a single value. There is a single object with this value. This object is accessed through the built-in name ``NotImplemented``. Numeric methods @@ -171,7 +171,7 @@ NotImplemented Ellipsis .. index:: - object: Ellipsis + pair: object; Ellipsis single: ...; ellipsis literal This type has a single value. There is a single object with this value. This @@ -179,7 +179,7 @@ Ellipsis ``Ellipsis``. Its truth value is true. :class:`numbers.Number` - .. index:: object: numeric + .. index:: pair: object; numeric These are created by numeric literals and returned as results by arithmetic operators and arithmetic built-in functions. Numeric objects are immutable; @@ -209,7 +209,7 @@ Ellipsis numbers: :class:`numbers.Integral` - .. index:: object: integer + .. index:: pair: object; integer These represent elements from the mathematical set of integers (positive and negative). @@ -225,7 +225,7 @@ Ellipsis Booleans (:class:`bool`) .. index:: - object: Boolean + pair: object; Boolean single: False single: True @@ -242,7 +242,7 @@ Ellipsis :class:`numbers.Real` (:class:`float`) .. index:: - object: floating point + pair: object; floating point pair: floating point; number pair: C; language pair: Java; language @@ -257,7 +257,7 @@ Ellipsis :class:`numbers.Complex` (:class:`complex`) .. index:: - object: complex + pair: object; complex pair: complex; number These represent complex numbers as a pair of machine-level double precision @@ -267,8 +267,8 @@ Ellipsis Sequences .. index:: - builtin: len - object: sequence + pair: built-in function; len + pair: object; sequence single: index operation single: item selection single: subscription @@ -293,8 +293,8 @@ Sequences Immutable sequences .. index:: - object: immutable sequence - object: immutable + pair: object; immutable sequence + pair: object; immutable An object of an immutable sequence type cannot change once it is created. (If the object contains references to other objects, these other objects may be @@ -308,8 +308,8 @@ Sequences Strings .. index:: - builtin: chr - builtin: ord + pair: built-in function; chr + pair: built-in function; ord single: character single: integer single: Unicode @@ -328,7 +328,7 @@ Sequences Tuples .. index:: - object: tuple + pair: object; tuple pair: singleton; tuple pair: empty; tuple @@ -350,8 +350,8 @@ Sequences Mutable sequences .. index:: - object: mutable sequence - object: mutable + pair: object; mutable sequence + pair: object; mutable pair: assignment; statement single: subscription single: slicing @@ -363,7 +363,7 @@ Sequences There are currently two intrinsic mutable sequence types: Lists - .. index:: object: list + .. index:: pair: object; list The items of a list are arbitrary Python objects. Lists are formed by placing a comma-separated list of expressions in square brackets. (Note @@ -377,15 +377,15 @@ Sequences (and hence unhashable), byte arrays otherwise provide the same interface and functionality as immutable :class:`bytes` objects. - .. 
index:: module: array + .. index:: pair: module; array The extension module :mod:`array` provides an additional example of a mutable sequence type, as does the :mod:`collections` module. Set types .. index:: - builtin: len - object: set type + pair: built-in function; len + pair: object; set type These represent unordered, finite sets of unique, immutable objects. As such, they cannot be indexed by any subscript. However, they can be iterated over, and @@ -402,14 +402,14 @@ Set types There are currently two intrinsic set types: Sets - .. index:: object: set + .. index:: pair: object; set These represent a mutable set. They are created by the built-in :func:`set` constructor and can be modified afterwards by several methods, such as :meth:`~set.add`. Frozen sets - .. index:: object: frozenset + .. index:: pair: object; frozenset These represent an immutable set. They are created by the built-in :func:`frozenset` constructor. As a frozenset is immutable and @@ -418,9 +418,9 @@ Set types Mappings .. index:: - builtin: len + pair: built-in function; len single: subscription - object: mapping + pair: object; mapping These represent finite sets of objects indexed by arbitrary index sets. The subscript notation ``a[k]`` selects the item indexed by ``k`` from the mapping @@ -431,7 +431,7 @@ Mappings There is currently a single intrinsic mapping type: Dictionaries - .. index:: object: dictionary + .. index:: pair: object; dictionary These represent finite sets of objects indexed by nearly arbitrary values. The only types of values not acceptable as keys are values containing lists or @@ -451,8 +451,8 @@ Mappings section :ref:`dict`). .. index:: - module: dbm.ndbm - module: dbm.gnu + pair: module; dbm.ndbm + pair: module; dbm.gnu The extension modules :mod:`dbm.ndbm` and :mod:`dbm.gnu` provide additional examples of mapping types, as does the :mod:`collections` @@ -465,7 +465,7 @@ Mappings Callable types .. index:: - object: callable + pair: object; callable pair: function; call single: invocation pair: function; argument @@ -476,8 +476,8 @@ Callable types User-defined functions .. index:: pair: user-defined; function - object: function - object: user-defined function + pair: object; function + pair: object; user-defined function A user-defined function object is created by a function definition (see section :ref:`function`). It should be called with an argument list @@ -580,8 +580,8 @@ Callable types Instance methods .. index:: - object: method - object: user-defined method + pair: object; method + pair: object; user-defined method pair: user-defined; method An instance method object combines a class, a class instance and any @@ -688,8 +688,8 @@ Callable types Built-in functions .. index:: - object: built-in function - object: function + pair: object; built-in function + pair: object; function pair: C; language A built-in function object is a wrapper around a C function. Examples of @@ -703,8 +703,8 @@ Callable types Built-in methods .. index:: - object: built-in method - object: method + pair: object; built-in method + pair: object; method pair: built-in; method This is really a different disguise of a built-in function, this time containing @@ -727,8 +727,8 @@ Callable types Modules .. index:: - statement: import - object: module + pair: statement; import + pair: object; module Modules are a basic organizational unit of Python code, and are created by the :ref:`import system ` as invoked either by the @@ -805,12 +805,12 @@ Custom classes .. 
XXX: Could we add that MRO doc as an appendix to the language ref? .. index:: - object: class - object: class instance - object: instance + pair: object; class + pair: object; class instance + pair: object; instance pair: class object; call single: container - object: dictionary + pair: object; dictionary pair: class; attribute When a class attribute reference (for class :class:`C`, say) would yield a @@ -865,8 +865,8 @@ Custom classes Class instances .. index:: - object: class instance - object: instance + pair: object; class instance + pair: object; instance pair: class; instance pair: class instance; attribute @@ -892,9 +892,9 @@ Class instances dictionary directly. .. index:: - object: numeric - object: sequence - object: mapping + pair: object; numeric + pair: object; sequence + pair: object; mapping Class instances can pretend to be numbers, sequences, or mappings if they have methods with certain special names. See section :ref:`specialnames`. @@ -908,8 +908,8 @@ Class instances I/O objects (also known as file objects) .. index:: - builtin: open - module: io + pair: built-in function; open + pair: module; io single: popen() (in module os) single: makefile() (socket method) single: sys.stdin @@ -996,7 +996,7 @@ Internal types required stack size; :attr:`co_flags` is an integer encoding a number of flags for the interpreter. - .. index:: object: generator + .. index:: pair: object; generator The following flag bits are defined for :attr:`co_flags`: bit ``0x04`` is set if the function uses the ``*arguments`` syntax to accept an arbitrary number of @@ -1053,7 +1053,7 @@ Internal types .. _frame-objects: Frame objects - .. index:: object: frame + .. index:: pair: object; frame Frame objects represent execution frames. They may occur in traceback objects (see below), and are also passed to registered trace functions. @@ -1116,7 +1116,7 @@ Internal types Traceback objects .. index:: - object: traceback + pair: object; traceback pair: stack; trace pair: exception; handler pair: execution; stack @@ -1151,7 +1151,7 @@ Internal types single: tb_frame (traceback attribute) single: tb_lineno (traceback attribute) single: tb_lasti (traceback attribute) - statement: try + pair: statement; try Special read-only attributes: :attr:`tb_frame` points to the execution frame of the current level; @@ -1177,7 +1177,7 @@ Internal types and the ``tb_next`` attribute of existing instances can be updated. Slice objects - .. index:: builtin: slice + .. index:: pair: built-in function; slice Slice objects are used to represent slices for :meth:`~object.__getitem__` @@ -1310,7 +1310,7 @@ Basic customization .. index:: single: destructor single: finalizer - statement: del + pair: statement; del Called when the instance is about to be destroyed. This is also called a finalizer or (improperly) a destructor. If a base class has a @@ -1411,7 +1411,7 @@ Basic customization .. method:: object.__bytes__(self) - .. index:: builtin: bytes + .. index:: pair: built-in function; bytes Called by :ref:`bytes ` to compute a byte-string representation of an object. This should return a :class:`bytes` object. @@ -1419,7 +1419,7 @@ Basic customization .. index:: single: string; __format__() (object method) pair: string; conversion - builtin: print + pair: built-in function; print .. method:: object.__format__(self, format_spec) @@ -1498,8 +1498,8 @@ Basic customization .. method:: object.__hash__(self) .. 
index:: - object: dictionary - builtin: hash + pair: object; dictionary + pair: built-in function; hash Called by built-in function :func:`hash` and for operations on members of hashed collections including :class:`set`, :class:`frozenset`, and @@ -1564,7 +1564,7 @@ Basic customization This is intended to provide protection against a denial-of-service caused by carefully chosen inputs that exploit the worst case performance of a dict insertion, O(n\ :sup:`2`) complexity. See - http://www.ocert.org/advisories/ocert-2011-003.html for details. + http://ocert.org/advisories/ocert-2011-003.html for details. Changing hash values affects the iteration order of sets. Python has never made guarantees about this ordering @@ -2050,7 +2050,7 @@ Metaclasses .. index:: single: metaclass - builtin: type + pair: built-in function; type single: = (equals); class definition By default, classes are constructed using :func:`type`. The class body is @@ -2477,7 +2477,7 @@ through the object's keys; for sequences, it should iterate through the values. .. method:: object.__len__(self) .. index:: - builtin: len + pair: built-in function; len single: __bool__() (object method) Called to implement the built-in function :func:`len`. Should return the length @@ -2506,7 +2506,7 @@ through the object's keys; for sequences, it should iterate through the values. .. versionadded:: 3.4 -.. index:: object: slice +.. index:: pair: object; slice .. note:: @@ -2635,9 +2635,9 @@ left undefined. object.__or__(self, other) .. index:: - builtin: divmod - builtin: pow - builtin: pow + pair: built-in function; divmod + pair: built-in function; pow + pair: built-in function; pow These methods are called to implement the binary arithmetic operations (``+``, ``-``, ``*``, ``@``, ``/``, ``//``, ``%``, :func:`divmod`, @@ -2670,8 +2670,8 @@ left undefined. object.__ror__(self, other) .. index:: - builtin: divmod - builtin: pow + pair: built-in function; divmod + pair: built-in function; pow These methods are called to implement the binary arithmetic operations (``+``, ``-``, ``*``, ``@``, ``/``, ``//``, ``%``, :func:`divmod`, @@ -2683,7 +2683,7 @@ left undefined. ``type(y).__rsub__(y, x)`` is called if ``type(x).__sub__(x, y)`` returns *NotImplemented*. - .. index:: builtin: pow + .. index:: pair: built-in function; pow Note that ternary :func:`pow` will not try calling :meth:`__rpow__` (the coercion rules would become too complicated). @@ -2730,7 +2730,7 @@ left undefined. object.__abs__(self) object.__invert__(self) - .. index:: builtin: abs + .. index:: pair: built-in function; abs Called to implement the unary arithmetic operations (``-``, ``+``, :func:`abs` and ``~``). @@ -2741,9 +2741,9 @@ left undefined. object.__float__(self) .. index:: - builtin: complex - builtin: int - builtin: float + pair: built-in function; complex + pair: built-in function; int + pair: built-in function; float Called to implement the built-in functions :func:`complex`, :func:`int` and :func:`float`. Should return a value @@ -2768,7 +2768,7 @@ left undefined. object.__floor__(self) object.__ceil__(self) - .. index:: builtin: round + .. index:: pair: built-in function; round Called to implement the built-in function :func:`round` and :mod:`math` functions :func:`~math.trunc`, :func:`~math.floor` and :func:`~math.ceil`. @@ -2796,7 +2796,7 @@ execution of the block of code. Context managers are normally invoked using the used by directly invoking their methods. .. 
index:: - statement: with + pair: statement; with single: context manager Typical uses of context managers include saving and restoring various kinds of @@ -2865,6 +2865,47 @@ a :exc:`TypeError`. The specification for the Python ``match`` statement. +.. _python-buffer-protocol: + +Emulating buffer types +---------------------- + +The :ref:`buffer protocol ` provides a way for Python +objects to expose efficient access to a low-level memory array. This protocol +is implemented by builtin types such as :class:`bytes` and :class:`memoryview`, +and third-party libraries may define additional buffer types. + +While buffer types are usually implemented in C, it is also possible to +implement the protocol in Python. + +.. method:: object.__buffer__(self, flags) + + Called when a buffer is requested from *self* (for example, by the + :class:`memoryview` constructor). The *flags* argument is an integer + representing the kind of buffer requested, affecting for example whether + the returned buffer is read-only or writable. :class:`inspect.BufferFlags` + provides a convenient way to interpret the flags. The method must return + a :class:`memoryview` object. + +.. method:: object.__release_buffer__(self, buffer) + + Called when a buffer is no longer needed. The *buffer* argument is a + :class:`memoryview` object that was previously returned by + :meth:`~object.__buffer__`. The method must release any resources associated + with the buffer. This method should return ``None``. + Buffer objects that do not need to perform any cleanup are not required + to implement this method. + +.. versionadded:: 3.12 + +.. seealso:: + + :pep:`688` - Making the buffer protocol accessible in Python + Introduces the Python ``__buffer__`` and ``__release_buffer__`` methods. + + :class:`collections.abc.Buffer` + ABC for buffer types. + .. _special-lookup: Special method lookup diff --git a/Doc/reference/executionmodel.rst b/Doc/reference/executionmodel.rst index a264015cbf4049..8917243999d399 100644 --- a/Doc/reference/executionmodel.rst +++ b/Doc/reference/executionmodel.rst @@ -151,7 +151,7 @@ to previously bound variables in the nearest enclosing function scope. :exc:`SyntaxError` is raised at compile time if the given name does not exist in any enclosing function scope. -.. index:: module: __main__ +.. index:: pair: module; __main__ The namespace for a module is automatically created the first time a module is imported. The main module for a script is always called :mod:`__main__`. diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index 1e4a13fbd6a3ce..b97a08f25d92a2 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -71,7 +71,7 @@ An identifier occurring as an atom is a name. See section :ref:`identifiers` for lexical definition and section :ref:`naming` for documentation of naming and binding. -.. index:: exception: NameError +.. index:: pair: exception; NameError When the name is bound to an object, evaluation of the atom yields that object. When a name is not bound, an attempt to evaluate it raises a :exc:`NameError` @@ -245,7 +245,7 @@ List displays pair: list; display pair: list; comprehensions pair: empty; list - object: list + pair: object; list single: [] (square brackets); list expression single: , (comma); expression list @@ -270,7 +270,7 @@ Set displays .. 
index:: pair: set; display pair: set; comprehensions - object: set + pair: object; set single: {} (curly brackets); set expression single: , (comma); expression list @@ -299,7 +299,7 @@ Dictionary displays pair: dictionary; display pair: dictionary; comprehensions key, datum, key/datum pair - object: dictionary + pair: object; dictionary single: {} (curly brackets); dictionary expression single: : (colon); in dictionary expressions single: , (comma); in dictionary displays @@ -361,7 +361,7 @@ Generator expressions .. index:: pair: generator; expression - object: generator + pair: object; generator single: () (parentheses); generator expression A generator expression is a compact generator notation in parentheses: @@ -415,8 +415,8 @@ Yield expressions ----------------- .. index:: - keyword: yield - keyword: from + pair: keyword; yield + pair: keyword; from pair: yield; expression pair: generator; function @@ -522,7 +522,7 @@ on the right hand side of an assignment statement. The proposal that expanded on :pep:`492` by adding generator capabilities to coroutine functions. -.. index:: object: generator +.. index:: pair: object; generator .. _generator-methods: Generator-iterator methods @@ -534,7 +534,7 @@ be used to control the execution of a generator function. Note that calling any of the generator methods below when the generator is already executing raises a :exc:`ValueError` exception. -.. index:: exception: StopIteration +.. index:: pair: exception; StopIteration .. method:: generator.__next__() @@ -589,7 +589,7 @@ is already executing raises a :exc:`ValueError` exception. The second signature \(type\[, value\[, traceback\]\]\) is deprecated and may be removed in a future version of Python. -.. index:: exception: GeneratorExit +.. index:: pair: exception; GeneratorExit .. method:: generator.close() @@ -701,7 +701,7 @@ of a *finalizer* method see the implementation of The expression ``yield from `` is a syntax error when used in an asynchronous generator function. -.. index:: object: asynchronous-generator +.. index:: pair: object; asynchronous-generator .. _asynchronous-generator-methods: Asynchronous generator-iterator methods @@ -711,7 +711,7 @@ This subsection describes the methods of an asynchronous generator iterator, which are used to control the execution of a generator function. -.. index:: exception: StopAsyncIteration +.. index:: pair: exception; StopAsyncIteration .. coroutinemethod:: agen.__anext__() @@ -763,7 +763,7 @@ which are used to control the execution of a generator function. The second signature \(type\[, value\[, traceback\]\]\) is deprecated and may be removed in a future version of Python. -.. index:: exception: GeneratorExit +.. index:: pair: exception; GeneratorExit .. coroutinemethod:: agen.aclose() @@ -810,9 +810,9 @@ An attribute reference is a primary followed by a period and a name: attributeref: `primary` "." `identifier` .. index:: - exception: AttributeError - object: module - object: list + pair: exception; AttributeError + pair: object; module + pair: object; list The primary must evaluate to an object of a type that supports attribute references, which most objects do. This object is then asked to produce the @@ -833,12 +833,12 @@ Subscriptions single: [] (square brackets); subscription .. 
index:: - object: sequence - object: mapping - object: string - object: tuple - object: list - object: dictionary + pair: object; sequence + pair: object; mapping + pair: object; string + pair: object; tuple + pair: object; list + pair: object; dictionary pair: sequence; item The subscription of an instance of a :ref:`container class ` @@ -906,10 +906,10 @@ Slicings single: , (comma); slicing .. index:: - object: sequence - object: string - object: tuple - object: list + pair: object; sequence + pair: object; string + pair: object; tuple + pair: object; list A slicing selects a range of items in a sequence object (e.g., a string, tuple or list). Slicings may be used as expressions or as targets in assignment or @@ -950,7 +950,7 @@ substituting ``None`` for missing expressions. .. index:: - object: callable + pair: object; callable single: call single: argument; call semantics single: () (parentheses); call @@ -1100,8 +1100,8 @@ a user-defined function: .. index:: pair: function; call triple: user-defined; function; call - object: user-defined function - object: function + pair: object; user-defined function + pair: object; function The code block for the function is executed, passing it the argument list. The first thing the code block will do is bind the formal parameters to the @@ -1115,25 +1115,25 @@ a built-in function or method: pair: built-in function; call pair: method; call pair: built-in method; call - object: built-in method - object: built-in function - object: method - object: function + pair: object; built-in method + pair: object; built-in function + pair: object; method + pair: object; function The result is up to the interpreter; see :ref:`built-in-funcs` for the descriptions of built-in functions and methods. a class object: .. index:: - object: class + pair: object; class pair: class object; call A new instance of that class is returned. a class instance method: .. index:: - object: class instance - object: instance + pair: object; class instance + pair: object; instance pair: class instance; call The corresponding user-defined function is called, with an argument list that is @@ -1149,7 +1149,7 @@ a class instance: if that method was called. -.. index:: keyword: await +.. index:: pair: keyword; await .. _await: Await expression @@ -1171,7 +1171,7 @@ The power operator .. index:: pair: power; operation - operator: ** + pair: operator; ** The power operator binds more tightly than unary operators on its left; it binds less tightly than unary operators on its right. The syntax is: @@ -1232,7 +1232,7 @@ operation can be overridden with the :meth:`__pos__` special method. .. index:: single: inversion - operator: ~ (tilde) + pair: operator; ~ (tilde) The unary ``~`` (invert) operator yields the bitwise inversion of its integer argument. The bitwise inversion of ``x`` is defined as ``-(x+1)``. It only @@ -1241,7 +1241,7 @@ applies to integral numbers or to custom objects that override the -.. index:: exception: TypeError +.. index:: pair: exception; TypeError In all three cases, if the argument does not have the proper type, a :exc:`TypeError` exception is raised. @@ -1267,7 +1267,7 @@ operators and one for additive operators: .. index:: single: multiplication - operator: * (asterisk) + pair: operator; * (asterisk) The ``*`` (multiplication) operator yields the product of its arguments. The arguments must either both be numbers, or one argument must be an integer and @@ -1280,7 +1280,7 @@ This operation can be customized using the special :meth:`__mul__` and .. 
index:: single: matrix multiplication - operator: @ (at) + pair: operator; @ (at) The ``@`` (at) operator is intended to be used for matrix multiplication. No builtin Python types implement this operator. @@ -1288,10 +1288,10 @@ builtin Python types implement this operator. .. versionadded:: 3.5 .. index:: - exception: ZeroDivisionError + pair: exception; ZeroDivisionError single: division - operator: / (slash) - operator: // + pair: operator; / (slash) + pair: operator; // The ``/`` (division) and ``//`` (floor division) operators yield the quotient of their arguments. The numeric arguments are first converted to a common type. @@ -1305,7 +1305,7 @@ This operation can be customized using the special :meth:`__truediv__` and .. index:: single: modulo - operator: % (percent) + pair: operator; % (percent) The ``%`` (modulo) operator yields the remainder from the division of the first argument by the second. The numeric arguments are first converted to a common @@ -1363,8 +1363,8 @@ Shifting operations .. index:: pair: shifting; operation - operator: << - operator: >> + pair: operator; << + pair: operator; >> The shifting operations have lower priority than the arithmetic operations: @@ -1377,7 +1377,7 @@ the left or right by the number of bits given by the second argument. This operation can be customized using the special :meth:`__lshift__` and :meth:`__rshift__` methods. -.. index:: exception: ValueError +.. index:: pair: exception; ValueError A right shift by *n* bits is defined as floor division by ``pow(2,n)``. A left shift by *n* bits is defined as multiplication with ``pow(2,n)``. @@ -1399,7 +1399,7 @@ Each of the three bitwise operations has a different priority level: .. index:: pair: bitwise; and - operator: & (ampersand) + pair: operator; & (ampersand) The ``&`` operator yields the bitwise AND of its arguments, which must be integers or one of them must be a custom object overriding :meth:`__and__` or @@ -1408,7 +1408,7 @@ integers or one of them must be a custom object overriding :meth:`__and__` or .. index:: pair: bitwise; xor pair: exclusive; or - operator: ^ (caret) + pair: operator; ^ (caret) The ``^`` operator yields the bitwise XOR (exclusive OR) of its arguments, which must be integers or one of them must be a custom object overriding :meth:`__xor__` or @@ -1417,7 +1417,7 @@ must be integers or one of them must be a custom object overriding :meth:`__xor_ .. index:: pair: bitwise; or pair: inclusive; or - operator: | (vertical bar) + pair: operator; | (vertical bar) The ``|`` operator yields the bitwise (inclusive) OR of its arguments, which must be integers or one of them must be a custom object overriding :meth:`__or__` or @@ -1432,12 +1432,12 @@ Comparisons .. index:: single: comparison pair: C; language - operator: < (less) - operator: > (greater) - operator: <= - operator: >= - operator: == - operator: != + pair: operator; < (less) + pair: operator; > (greater) + pair: operator; <= + pair: operator; >= + pair: operator; == + pair: operator; != Unlike C, all comparison operations in Python have the same priority, which is lower than that of any arithmetic, shifting or bitwise operation. Also unlike @@ -1669,17 +1669,17 @@ raises the :exc:`IndexError` exception. (If any other exception is raised, it i if :keyword:`in` raised that exception). .. 
index:: - operator: in - operator: not in + pair: operator; in + pair: operator; not in pair: membership; test - object: sequence + pair: object; sequence The operator :keyword:`not in` is defined to have the inverse truth value of :keyword:`in`. .. index:: - operator: is - operator: is not + pair: operator; is + pair: operator; is not pair: identity; test @@ -1719,17 +1719,17 @@ control flow statements, the following values are interpreted as false: other values are interpreted as true. User-defined objects can customize their truth value by providing a :meth:`__bool__` method. -.. index:: operator: not +.. index:: pair: operator; not The operator :keyword:`not` yields ``True`` if its argument is false, ``False`` otherwise. -.. index:: operator: and +.. index:: pair: operator; and The expression ``x and y`` first evaluates *x*; if *x* is false, its value is returned; otherwise, *y* is evaluated and the resulting value is returned. -.. index:: operator: or +.. index:: pair: operator; or The expression ``x or y`` first evaluates *x*; if *x* is true, its value is returned; otherwise, *y* is evaluated and the resulting value is returned. @@ -1854,7 +1854,7 @@ Expression lists starred_expression: `expression` | (`starred_item` ",")* [`starred_item`] starred_item: `assignment_expression` | "*" `or_expr` -.. index:: object: tuple +.. index:: pair: object; tuple Except when part of a list or set display, an expression list containing at least one comma yields a tuple. The length of diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index 57eb5403243eef..0f416a5c583f85 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -324,15 +324,18 @@ modules, and one that knows how to import modules from an :term:`import path` .. versionchanged:: 3.4 The :meth:`~importlib.abc.MetaPathFinder.find_spec` method of meta path - finders replaced :meth:`~importlib.abc.MetaPathFinder.find_module`, which + finders replaced :meth:`!find_module`, which is now deprecated. While it will continue to work without change, the import machinery will try it only if the finder does not implement ``find_spec()``. .. versionchanged:: 3.10 - Use of :meth:`~importlib.abc.MetaPathFinder.find_module` by the import system + Use of :meth:`!find_module` by the import system now raises :exc:`ImportWarning`. +.. versionchanged:: 3.12 + ``find_module()`` has been removed. Use :meth:`find_spec` instead. + Loading ======= @@ -703,7 +706,7 @@ Here are the exact rules used: * Otherwise, just use the module's ``__name__`` in the repr. .. versionchanged:: 3.12 - Use of :meth:`module_repr`, having been deprecated since Python 3.4, was + Use of :meth:`!module_repr`, having been deprecated since Python 3.4, was removed in Python 3.12 and is no longer called during the resolution of a module's repr. @@ -837,7 +840,7 @@ stores finder objects rather than being limited to :term:`importer` objects). In this way, the expensive search for a particular :term:`path entry` location's :term:`path entry finder` need only be done once. User code is free to remove cache entries from :data:`sys.path_importer_cache` forcing -the path based finder to perform the path entry search again [#fnpic]_. +the path based finder to perform the path entry search again. If the path entry is not present in the cache, the path based finder iterates over every callable in :data:`sys.path_hooks`. Each of the :term:`path entry @@ -887,13 +890,13 @@ module. ``find_spec()`` returns a fully populated spec for the module. 
This spec will always have "loader" set (with one exception). To indicate to the import machinery that the spec represents a namespace -:term:`portion`, the path entry finder sets "submodule_search_locations" to +:term:`portion`, the path entry finder sets ``submodule_search_locations`` to a list containing the portion. .. versionchanged:: 3.4 :meth:`~importlib.abc.PathEntryFinder.find_spec` replaced - :meth:`~importlib.abc.PathEntryFinder.find_loader` and - :meth:`~importlib.abc.PathEntryFinder.find_module`, both of which + :meth:`!find_loader` and + :meth:`!find_module`, both of which are now deprecated, but will be used if ``find_spec()`` is not defined. Older path entry finders may implement one of these two deprecated methods @@ -901,7 +904,7 @@ a list containing the portion. sake of backward compatibility. However, if ``find_spec()`` is implemented on the path entry finder, the legacy methods are ignored. - :meth:`~importlib.abc.PathEntryFinder.find_loader` takes one argument, the + :meth:`!find_loader` takes one argument, the fully qualified name of the module being imported. ``find_loader()`` returns a 2-tuple where the first item is the loader and the second item is a namespace :term:`portion`. @@ -920,10 +923,13 @@ a list containing the portion. ``find_loader()`` in preference to ``find_module()``. .. versionchanged:: 3.10 - Calls to :meth:`~importlib.abc.PathEntryFinder.find_module` and - :meth:`~importlib.abc.PathEntryFinder.find_loader` by the import + Calls to :meth:`!find_module` and + :meth:`!find_loader` by the import system will raise :exc:`ImportWarning`. +.. versionchanged:: 3.12 + ``find_module()`` and ``find_loader()`` have been removed. + Replacing the standard import system ==================================== @@ -1045,8 +1051,8 @@ The original specification for :data:`sys.meta_path` was :pep:`302`, with subsequent extension in :pep:`420`. :pep:`420` introduced :term:`namespace packages ` for -Python 3.3. :pep:`420` also introduced the :meth:`find_loader` protocol as an -alternative to :meth:`find_module`. +Python 3.3. :pep:`420` also introduced the :meth:`!find_loader` protocol as an +alternative to :meth:`!find_module`. :pep:`366` describes the addition of the ``__package__`` attribute for explicit relative imports in main modules. @@ -1073,9 +1079,3 @@ methods to finders and loaders. module may replace itself in :data:`sys.modules`. This is implementation-specific behavior that is not guaranteed to work in other Python implementations. - -.. [#fnpic] In legacy code, it is possible to find instances of - :class:`imp.NullImporter` in the :data:`sys.path_importer_cache`. It - is recommended that code be changed to use ``None`` instead. See - :ref:`portingpythoncode` for more details. Note that the ``imp`` module - was removed in Python 3.12. diff --git a/Doc/reference/introduction.rst b/Doc/reference/introduction.rst index 914a11556c94e6..81f0a5c5d43883 100644 --- a/Doc/reference/introduction.rst +++ b/Doc/reference/introduction.rst @@ -74,7 +74,7 @@ PyPy and a Just in Time compiler. One of the goals of the project is to encourage experimentation with the language itself by making it easier to modify the interpreter (since it is written in Python). Additional information is - available on `the PyPy project's home page `_. + available on `the PyPy project's home page `_. 
Each of these implementations varies in some way from the language as documented in this manual, or introduces specific information beyond what's covered in the diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index c98ac81e415b72..f7a8b44d195417 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -53,8 +53,8 @@ An expression statement evaluates the expression list (which may be a single expression). .. index:: - builtin: repr - object: None + pair: built-in function; repr + pair: object; None pair: string; conversion single: output pair: standard; output @@ -76,7 +76,7 @@ Assignment statements pair: assignment; statement pair: binding; name pair: rebinding; name - object: mutable + pair: object; mutable pair: attribute; assignment Assignment statements are used to (re)bind names to values and to modify @@ -185,7 +185,7 @@ Assignment of an object to a single target is recursively defined as follows. .. index:: pair: subscription; assignment - object: mutable + pair: object; mutable * If the target is a subscription: The primary expression in the reference is evaluated. It should yield either a mutable sequence object (such as a list) @@ -193,8 +193,8 @@ Assignment of an object to a single target is recursively defined as follows. evaluated. .. index:: - object: sequence - object: list + pair: object; sequence + pair: object; list If the primary is a mutable sequence object (such as a list), the subscript must yield an integer. If it is negative, the sequence's length is added to @@ -204,8 +204,8 @@ Assignment of an object to a single target is recursively defined as follows. raised (assignment to a subscripted sequence cannot add new items to a list). .. index:: - object: mapping - object: dictionary + pair: object; mapping + pair: object; dictionary If the primary is a mapping object (such as a dictionary), the subscript must have a type compatible with the mapping's key type, and the mapping is then @@ -376,7 +376,7 @@ The :keyword:`!assert` statement ================================ .. index:: - ! statement: assert + ! pair: statement; assert pair: debugging; assertions single: , (comma); expression list @@ -398,7 +398,7 @@ The extended form, ``assert expression1, expression2``, is equivalent to :: .. index:: single: __debug__ - exception: AssertionError + pair: exception; AssertionError These equivalences assume that :const:`__debug__` and :exc:`AssertionError` refer to the built-in variables with those names. In the current implementation, the @@ -419,7 +419,7 @@ The :keyword:`!pass` statement ============================== .. index:: - statement: pass + pair: statement; pass pair: null; operation pair: null; operation @@ -441,7 +441,7 @@ The :keyword:`!del` statement ============================= .. index:: - ! statement: del + ! pair: statement; del pair: deletion; target triple: deletion; target; list @@ -454,7 +454,7 @@ Rather than spelling it out in full details, here are some hints. Deletion of a target list recursively deletes each target, from left to right. .. index:: - statement: global + pair: statement; global pair: unbinding; name Deletion of a name removes the binding of that name from the local or global @@ -480,7 +480,7 @@ The :keyword:`!return` statement ================================ .. index:: - ! statement: return + ! pair: statement; return pair: function; definition pair: class; definition @@ -495,7 +495,7 @@ If an expression list is present, it is evaluated, else ``None`` is substituted. 
:keyword:`return` leaves the current function call with the expression list (or ``None``) as return value. -.. index:: keyword: finally +.. index:: pair: keyword; finally When :keyword:`return` passes control out of a :keyword:`try` statement with a :keyword:`finally` clause, that :keyword:`!finally` clause is executed before @@ -517,11 +517,11 @@ The :keyword:`!yield` statement =============================== .. index:: - statement: yield + pair: statement; yield single: generator; function single: generator; iterator single: function; generator - exception: StopIteration + pair: exception; StopIteration .. productionlist:: python-grammar yield_stmt: `yield_expression` @@ -553,7 +553,7 @@ The :keyword:`!raise` statement =============================== .. index:: - ! statement: raise + ! pair: statement; raise single: exception pair: raising; exception single: __traceback__ (exception attribute) @@ -574,7 +574,7 @@ instantiating the class with no arguments. The :dfn:`type` of the exception is the exception instance's class, the :dfn:`value` is the instance itself. -.. index:: object: traceback +.. index:: pair: object; traceback A traceback object is normally created automatically when an exception is raised and attached to it as the :attr:`__traceback__` attribute, which is writable. @@ -667,9 +667,9 @@ The :keyword:`!break` statement =============================== .. index:: - ! statement: break - statement: for - statement: while + ! pair: statement; break + pair: statement; for + pair: statement; while pair: loop; statement .. productionlist:: python-grammar @@ -679,7 +679,7 @@ The :keyword:`!break` statement :keyword:`while` loop, but not nested in a function or class definition within that loop. -.. index:: keyword: else +.. index:: pair: keyword; else pair: loop control; target It terminates the nearest enclosing loop, skipping the optional :keyword:`!else` @@ -688,7 +688,7 @@ clause if the loop has one. If a :keyword:`for` loop is terminated by :keyword:`break`, the loop control target keeps its current value. -.. index:: keyword: finally +.. index:: pair: keyword; finally When :keyword:`break` passes control out of a :keyword:`try` statement with a :keyword:`finally` clause, that :keyword:`!finally` clause is executed before @@ -701,11 +701,11 @@ The :keyword:`!continue` statement ================================== .. index:: - ! statement: continue - statement: for - statement: while + ! pair: statement; continue + pair: statement; for + pair: statement; while pair: loop; statement - keyword: finally + pair: keyword; finally .. productionlist:: python-grammar continue_stmt: "continue" @@ -726,12 +726,12 @@ The :keyword:`!import` statement ================================ .. index:: - ! statement: import + ! pair: statement; import single: module; importing pair: name; binding - keyword: from - keyword: as - exception: ImportError + pair: keyword; from + pair: keyword; as + pair: exception; ImportError single: , (comma); import statement .. productionlist:: python-grammar @@ -942,7 +942,7 @@ The :keyword:`!global` statement ================================ .. index:: - ! statement: global + ! pair: statement; global triple: global; name; binding single: , (comma); identifier list @@ -970,9 +970,9 @@ annotation. them or silently change the meaning of the program. .. 
index:: - builtin: exec - builtin: eval - builtin: compile + pair: built-in function; exec + pair: built-in function; eval + pair: built-in function; compile **Programmer's note:** :keyword:`global` is a directive to the parser. It applies only to code parsed at the same time as the :keyword:`!global` statement. @@ -988,7 +988,7 @@ call. The same applies to the :func:`eval` and :func:`compile` functions. The :keyword:`!nonlocal` statement ================================== -.. index:: statement: nonlocal +.. index:: pair: statement; nonlocal single: , (comma); identifier list .. productionlist:: python-grammar diff --git a/Doc/reference/toplevel_components.rst b/Doc/reference/toplevel_components.rst index 319c9de484241e..dd3d3d6878e289 100644 --- a/Doc/reference/toplevel_components.rst +++ b/Doc/reference/toplevel_components.rst @@ -21,9 +21,9 @@ Complete Python programs .. index:: single: program .. index:: - module: sys - module: __main__ - module: builtins + pair: module; sys + pair: module; __main__ + pair: module; builtins While a language specification need not prescribe how the language interpreter is invoked, it is useful to have a notion of a complete Python program. A @@ -38,7 +38,7 @@ the next section. .. index:: single: interactive mode - module: __main__ + pair: module; __main__ The interpreter may also be invoked in interactive mode; in this case, it does not read and execute a complete program but reads and executes one statement @@ -98,7 +98,7 @@ Expression input ================ .. index:: single: input -.. index:: builtin: eval +.. index:: pair: built-in function; eval :func:`eval` is used for expression input. It ignores leading whitespace. The string argument to :func:`eval` must have the following form: diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index d659a4a54b9d11..cd8d9febb0d13b 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -674,6 +674,30 @@ def process_audit_events(app, doctree, fromdocname): node.replace_self(table) +def patch_pairindextypes(app, _env) -> None: + """Remove all entries from ``pairindextypes`` before writing POT files. + + We want to run this just before writing output files, as the check to + circumvent is in ``I18nBuilder.write_doc()``. + As such, we link this to ``env-check-consistency``, even though it has + nothing to do with the environment consistency check. + """ + if app.builder.name != 'gettext': + return + + # allow translating deprecated index entries + try: + from sphinx.domains.python import pairindextypes + except ImportError: + pass + else: + # Sphinx checks if a 'pair' type entry on an index directive is one of + # the Sphinx-translated pairindextypes values. As we intend to move + # away from this, we need Sphinx to believe that these values don't + # exist, by deleting them when using the gettext builder. 
+ pairindextypes.clear() + + def setup(app): app.add_role('issue', issue_role) app.add_role('gh', gh_issue_role) @@ -695,6 +719,7 @@ def setup(app): app.add_directive_to_domain('py', 'awaitablemethod', PyAwaitableMethod) app.add_directive_to_domain('py', 'abstractmethod', PyAbstractMethod) app.add_directive('miscnews', MiscNews) + app.connect('env-check-consistency', patch_pairindextypes) app.connect('doctree-resolved', process_audit_events) app.connect('env-merge-info', audit_events_merge) app.connect('env-purge-doc', audit_events_purge) diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst index 116801177a3add..06445e000c1ef6 100644 --- a/Doc/tutorial/classes.rst +++ b/Doc/tutorial/classes.rst @@ -344,7 +344,7 @@ list objects have methods called append, insert, remove, sort, and so on. However, in the following discussion, we'll use the term method exclusively to mean methods of class instance objects, unless explicitly stated otherwise.) -.. index:: object: method +.. index:: pair: object; method Valid method names of an instance object depend on its class. By definition, all attributes of a class that are function objects define corresponding diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index 52db51e84cd5fc..c9b3d982c31c9a 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -46,7 +46,7 @@ details see :ref:`tut-match`. ========================== .. index:: - statement: for + pair: statement; for The :keyword:`for` statement in Python differs a bit from what you may be used to in C or Pascal. Rather than always iterating over an arithmetic progression diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst index 3581b3727a53ea..f5cdd84cbadefe 100644 --- a/Doc/tutorial/inputoutput.rst +++ b/Doc/tutorial/inputoutput.rst @@ -285,8 +285,8 @@ Reading and Writing Files ========================= .. index:: - builtin: open - object: file + pair: built-in function; open + pair: object; file :func:`open` returns a :term:`file object`, and is most commonly used with two positional arguments and one keyword argument: @@ -466,7 +466,7 @@ Reference for a complete guide to file objects. Saving structured data with :mod:`json` --------------------------------------- -.. index:: module: json +.. index:: pair: module; json Strings can easily be written to and read from a file. Numbers take a bit more effort, since the :meth:`read` method only returns strings, which will have to diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst index 4daafa49a34d2e..3bd034bcc9703f 100644 --- a/Doc/tutorial/modules.rst +++ b/Doc/tutorial/modules.rst @@ -264,7 +264,7 @@ Some tips for experts: Standard Modules ================ -.. index:: module: sys +.. index:: pair: module; sys Python comes with a library of standard modules, described in a separate document, the Python Library Reference ("Library Reference" hereafter). Some @@ -345,7 +345,7 @@ Without arguments, :func:`dir` lists the names you have defined currently:: Note that it lists all types of names: variables, modules, functions, etc. -.. index:: module: builtins +.. index:: pair: module; builtins :func:`dir` does not list the names of built-in functions and variables. 
If you want a list of those, they are defined in the standard module diff --git a/Doc/tutorial/stdlib.rst b/Doc/tutorial/stdlib.rst index 4f5ada90eb57bc..6bae279c5e9cde 100644 --- a/Doc/tutorial/stdlib.rst +++ b/Doc/tutorial/stdlib.rst @@ -24,7 +24,7 @@ Be sure to use the ``import os`` style instead of ``from os import *``. This will keep :func:`os.open` from shadowing the built-in :func:`open` function which operates much differently. -.. index:: builtin: help +.. index:: pair: built-in function; help The built-in :func:`dir` and :func:`help` functions are useful as interactive aids for working with large modules like :mod:`os`:: diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index b35e8454fa2a1a..9d4042ce5a7e8a 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -370,7 +370,7 @@ Miscellaneous options Hash randomization is intended to provide protection against a denial-of-service caused by carefully chosen inputs that exploit the worst case performance of a dict construction, O(n\ :sup:`2`) complexity. See - http://www.ocert.org/advisories/ocert-2011-003.html for details. + http://ocert.org/advisories/ocert-2011-003.html for details. :envvar:`PYTHONHASHSEED` allows you to set a fixed value for the hash seed secret. diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst index 9ae0270eaee7ab..69cd5c92d884d0 100644 --- a/Doc/using/mac.rst +++ b/Doc/using/mac.rst @@ -66,7 +66,7 @@ number of standard Unix command line editors, :program:`vim` and :program:`BBEdit` or :program:`TextWrangler` from Bare Bones Software (see http://www.barebones.com/products/bbedit/index.html) are good choices, as is :program:`TextMate` (see https://macromates.com/). Other editors include -:program:`Gvim` (https://macvim-dev.github.io/macvim/) and :program:`Aquamacs` +:program:`Gvim` (https://macvim.org/macvim/) and :program:`Aquamacs` (http://aquamacs.org/). To run your script from the Terminal window you must make sure that diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 380950eb507ffb..43e3c72f3e1cde 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -541,7 +541,7 @@ Besides the standard CPython distribution, there are modified packages including additional functionality. The following is a list of popular versions and their key features: -`ActivePython `_ +`ActivePython `_ Installer with multi-platform compatibility, documentation, PyWin32 `Anaconda `_ diff --git a/Doc/whatsnew/2.0.rst b/Doc/whatsnew/2.0.rst index 4bcb2acae1e640..0eefefd863a68f 100644 --- a/Doc/whatsnew/2.0.rst +++ b/Doc/whatsnew/2.0.rst @@ -933,7 +933,7 @@ using it:: parser.parse( 'hamlet.xml' ) For more information, consult the Python documentation, or the XML HOWTO at -http://pyxml.sourceforge.net/topics/howto/xml-howto.html. +https://pyxml.sourceforge.net/topics/howto/xml-howto.html. DOM Support diff --git a/Doc/whatsnew/2.1.rst b/Doc/whatsnew/2.1.rst index 0136de58774038..676da702b39693 100644 --- a/Doc/whatsnew/2.1.rst +++ b/Doc/whatsnew/2.1.rst @@ -613,7 +613,7 @@ New and Improved Modules framework based on running embedded examples in docstrings and comparing the results against the expected output. PyUnit, contributed by Steve Purcell, is a unit testing framework inspired by JUnit, which was in turn an adaptation of - Kent Beck's Smalltalk testing framework. See http://pyunit.sourceforge.net/ for + Kent Beck's Smalltalk testing framework. See https://pyunit.sourceforge.net/ for more information about PyUnit. 
* The :mod:`difflib` module contains a class, :class:`SequenceMatcher`, which diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst index 0c3bfda1933957..82aff0be1ed3b3 100644 --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -632,10 +632,10 @@ queen threatens another) and the Knight's Tour (a route that takes a knight to every square of an $NxN$ chessboard without visiting any square twice). The idea of generators comes from other programming languages, especially Icon -(https://www.cs.arizona.edu/icon/), where the idea of generators is central. In +(https://www2.cs.arizona.edu/icon/), where the idea of generators is central. In Icon, every expression and function call behaves like a generator. One example from "An Overview of the Icon Programming Language" at -https://www.cs.arizona.edu/icon/docs/ipd266.htm gives an idea of what this looks +https://www2.cs.arizona.edu/icon/docs/ipd266.htm gives an idea of what this looks like:: sentence := "Store it in the neighboring harbor" diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst index c6e2003e92f1b3..4eb864f5092d30 100644 --- a/Doc/whatsnew/2.3.rst +++ b/Doc/whatsnew/2.3.rst @@ -218,10 +218,10 @@ queen threatens another) and the Knight's Tour (a route that takes a knight to every square of an $NxN$ chessboard without visiting any square twice). The idea of generators comes from other programming languages, especially Icon -(https://www.cs.arizona.edu/icon/), where the idea of generators is central. In +(https://www2.cs.arizona.edu/icon/), where the idea of generators is central. In Icon, every expression and function call behaves like a generator. One example from "An Overview of the Icon Programming Language" at -https://www.cs.arizona.edu/icon/docs/ipd266.htm gives an idea of what this looks +https://www2.cs.arizona.edu/icon/docs/ipd266.htm gives an idea of what this looks like:: sentence := "Store it in the neighboring harbor" @@ -728,7 +728,7 @@ module: Importer objects must have a single method, ``find_module(fullname, path=None)``. *fullname* will be a module or package name, e.g. ``string`` or -``distutils.core``. :meth:`find_module` must return a loader object that has a +``distutils.core``. :meth:`!find_module` must return a loader object that has a single method, ``load_module(fullname)``, that creates and returns the corresponding module object. @@ -1332,7 +1332,7 @@ complete list of changes, or look through the CVS logs for all the details. (Contributed by Kevin O'Connor.) * The IDLE integrated development environment has been updated using the code - from the IDLEfork project (http://idlefork.sourceforge.net). The most notable feature is + from the IDLEfork project (https://idlefork.sourceforge.net). The most notable feature is that the code being developed is now executed in a subprocess, meaning that there's no longer any need for manual ``reload()`` operations. IDLE's core code has been incorporated into the standard library as the :mod:`idlelib` package. diff --git a/Doc/whatsnew/2.4.rst b/Doc/whatsnew/2.4.rst index 63e819876ce310..98dc83fe935d5e 100644 --- a/Doc/whatsnew/2.4.rst +++ b/Doc/whatsnew/2.4.rst @@ -756,7 +756,7 @@ API that perform ASCII-only conversions, ignoring the locale setting: :c:expr:`double` to an ASCII string. The code for these functions came from the GLib library -(https://developer.gnome.org/glib/stable/), whose developers kindly +(https://developer-old.gnome.org/glib/2.26/), whose developers kindly relicensed the relevant functions and donated them to the Python Software Foundation. 
The :mod:`locale` module can now change the numeric locale, letting extensions such as GTK+ produce the correct results. diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst index 4ee2aacb108a36..84bb651e68eed5 100644 --- a/Doc/whatsnew/2.6.rst +++ b/Doc/whatsnew/2.6.rst @@ -1433,7 +1433,7 @@ one, :func:`math.trunc`, that's been backported to Python 2.6. `Scheme's numerical tower `__, from the Guile manual. - `Scheme's number datatypes `__ from the R5RS Scheme specification. + `Scheme's number datatypes `__ from the R5RS Scheme specification. The :mod:`fractions` Module @@ -2363,7 +2363,7 @@ changes, or look through the Subversion logs for all the details. negotiation itself. (Patch contributed by Bill Fenner; :issue:`829951`.) -* The :mod:`socket` module now supports TIPC (http://tipc.sourceforge.net/), +* The :mod:`socket` module now supports TIPC (https://tipc.sourceforge.net/), a high-performance non-IP-based protocol designed for use in clustered environments. TIPC addresses are 4- or 5-tuples. (Contributed by Alberto Bertogli; :issue:`1646`.) diff --git a/Doc/whatsnew/3.0.rst b/Doc/whatsnew/3.0.rst index 63b24748d8aab6..f9ac13036cbc8d 100644 --- a/Doc/whatsnew/3.0.rst +++ b/Doc/whatsnew/3.0.rst @@ -840,7 +840,7 @@ Builtins need it; however, 99 percent of the time an explicit :keyword:`for` loop is more readable. -* Removed :func:`reload`. Use :func:`imp.reload`. +* Removed :func:`reload`. Use :func:`!imp.reload`. * Removed. :meth:`dict.has_key` -- use the :keyword:`in` operator instead. diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index f6a48ed2680c14..661eeaedbfc0d0 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -1608,11 +1608,11 @@ Deprecated * Starting in this release, there will be a concerted effort to begin cleaning up old import semantics that were kept for Python 2.7 compatibility. Specifically, - :meth:`~importlib.abc.PathEntryFinder.find_loader`/:meth:`~importlib.abc.Finder.find_module` + :meth:`!find_loader`/:meth:`!find_module` (superseded by :meth:`~importlib.abc.Finder.find_spec`), :meth:`~importlib.abc.Loader.load_module` (superseded by :meth:`~importlib.abc.Loader.exec_module`), - :meth:`~importlib.abc.Loader.module_repr` (which the import system + :meth:`!module_repr` (which the import system takes care of for you), the ``__package__`` attribute (superseded by ``__spec__.parent``), the ``__loader__`` attribute (superseded by ``__spec__.loader``), and the ``__cached__`` attribute @@ -1645,8 +1645,8 @@ Deprecated :meth:`~importlib.abc.Loader.exec_module` is preferred. (Contributed by Brett Cannon in :issue:`26131`.) -* The use of :meth:`importlib.abc.MetaPathFinder.find_module` and - :meth:`importlib.abc.PathEntryFinder.find_module` by the import system now +* The use of :meth:`!importlib.abc.MetaPathFinder.find_module` and + :meth:`!importlib.abc.PathEntryFinder.find_module` by the import system now trigger an :exc:`ImportWarning` as :meth:`importlib.abc.MetaPathFinder.find_spec` and :meth:`importlib.abc.PathEntryFinder.find_spec` @@ -1654,53 +1654,53 @@ Deprecated :func:`importlib.util.spec_from_loader` to help in porting. (Contributed by Brett Cannon in :issue:`42134`.) -* The use of :meth:`importlib.abc.PathEntryFinder.find_loader` by the import +* The use of :meth:`!importlib.abc.PathEntryFinder.find_loader` by the import system now triggers an :exc:`ImportWarning` as :meth:`importlib.abc.PathEntryFinder.find_spec` is preferred. You can use :func:`importlib.util.spec_from_loader` to help in porting. 
(Contributed by Brett Cannon in :issue:`43672`.) * The various implementations of - :meth:`importlib.abc.MetaPathFinder.find_module` ( - :meth:`importlib.machinery.BuiltinImporter.find_module`, - :meth:`importlib.machinery.FrozenImporter.find_module`, - :meth:`importlib.machinery.WindowsRegistryFinder.find_module`, - :meth:`importlib.machinery.PathFinder.find_module`, - :meth:`importlib.abc.MetaPathFinder.find_module` ), - :meth:`importlib.abc.PathEntryFinder.find_module` ( - :meth:`importlib.machinery.FileFinder.find_module` ), and - :meth:`importlib.abc.PathEntryFinder.find_loader` ( - :meth:`importlib.machinery.FileFinder.find_loader` ) + :meth:`!importlib.abc.MetaPathFinder.find_module` ( + :meth:`!importlib.machinery.BuiltinImporter.find_module`, + :meth:`!importlib.machinery.FrozenImporter.find_module`, + :meth:`!importlib.machinery.WindowsRegistryFinder.find_module`, + :meth:`!importlib.machinery.PathFinder.find_module`, + :meth:`!importlib.abc.MetaPathFinder.find_module` ), + :meth:`!importlib.abc.PathEntryFinder.find_module` ( + :meth:`!importlib.machinery.FileFinder.find_module` ), and + :meth:`!importlib.abc.PathEntryFinder.find_loader` ( + :meth:`!importlib.machinery.FileFinder.find_loader` ) now raise :exc:`DeprecationWarning` and are slated for removal in Python 3.12 (previously they were documented as deprecated in Python 3.4). (Contributed by Brett Cannon in :issue:`42135`.) -* :class:`importlib.abc.Finder` is deprecated (including its sole method, - :meth:`~importlib.abc.Finder.find_module`). Both +* :class:`!importlib.abc.Finder` is deprecated (including its sole method, + :meth:`!find_module`). Both :class:`importlib.abc.MetaPathFinder` and :class:`importlib.abc.PathEntryFinder` no longer inherit from the class. Users should inherit from one of these two classes as appropriate instead. (Contributed by Brett Cannon in :issue:`42135`.) -* The deprecations of :mod:`imp`, :func:`importlib.find_loader`, - :func:`importlib.util.set_package_wrapper`, - :func:`importlib.util.set_loader_wrapper`, - :func:`importlib.util.module_for_loader`, - :class:`pkgutil.ImpImporter`, and - :class:`pkgutil.ImpLoader` have all been updated to list Python 3.12 as the +* The deprecations of :mod:`!imp`, :func:`!importlib.find_loader`, + :func:`!importlib.util.set_package_wrapper`, + :func:`!importlib.util.set_loader_wrapper`, + :func:`!importlib.util.module_for_loader`, + :class:`!pkgutil.ImpImporter`, and + :class:`!pkgutil.ImpLoader` have all been updated to list Python 3.12 as the slated version of removal (they began raising :exc:`DeprecationWarning` in previous versions of Python). (Contributed by Brett Cannon in :issue:`43720`.) * The import system now uses the ``__spec__`` attribute on modules before - falling back on :meth:`~importlib.abc.Loader.module_repr` for a module's + falling back on :meth:`!module_repr` for a module's ``__repr__()`` method. Removal of the use of ``module_repr()`` is scheduled for Python 3.12. (Contributed by Brett Cannon in :issue:`42137`.) -* :meth:`importlib.abc.Loader.module_repr`, - :meth:`importlib.machinery.FrozenLoader.module_repr`, and - :meth:`importlib.machinery.BuiltinLoader.module_repr` are deprecated and +* :meth:`!importlib.abc.Loader.module_repr`, + :meth:`!importlib.machinery.FrozenLoader.module_repr`, and + :meth:`!importlib.machinery.BuiltinLoader.module_repr` are deprecated and slated for removal in Python 3.12. (Contributed by Brett Cannon in :issue:`42136`.) 
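A minimal sketch of the porting pattern these deprecation entries point at: implement ``find_spec()`` instead of the old ``find_module()``, reusing an existing loader via ``importlib.util.spec_from_loader()`` as suggested above. The ``DbFinder`` class and its ``_loader_for()`` helper are invented for illustration::

    import importlib.abc
    import importlib.util

    class DbFinder(importlib.abc.MetaPathFinder):
        """Hypothetical finder written against the modern find_spec() API."""

        def find_spec(self, fullname, path, target=None):
            loader = self._loader_for(fullname)   # hypothetical lookup helper
            if loader is None:
                return None                       # decline; the next finder is tried
            # Wrap the loader in a ModuleSpec, which is what the import
            # system now expects from finders instead of a bare loader.
            return importlib.util.spec_from_loader(fullname, loader)

        def _loader_for(self, fullname):
            return None                           # placeholder for a real lookup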
diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst index 10fcfb6a0b5639..7a479c6e56a9f0 100644 --- a/Doc/whatsnew/3.11.rst +++ b/Doc/whatsnew/3.11.rst @@ -220,7 +220,7 @@ The copy of the :ref:`launcher` included with Python 3.11 has been significantly updated. It now supports company/tag syntax as defined in :pep:`514` using the ``-V:/`` argument instead of the limited ``-.``. This allows launching distributions other than ``PythonCore``, -the one hosted on `python.org `_. +the one hosted on `python.org `_. When using ``-V:`` selectors, either company or tag can be omitted, but all installs will be searched. For example, ``-V:OtherPython/`` will select the @@ -666,19 +666,11 @@ enum for :meth:`~object.__str__` and :meth:`~object.__format__` (used by :func:`str`, :func:`format` and :term:`f-string`\s). -* Changed :class:`~enum.IntEnum`, :class:`~enum.IntFlag` and :class:`~enum.StrEnum` - to now inherit from :class:`~enum.ReprEnum`, - so their :func:`str` output now matches :func:`format` - (both ``str(AnIntEnum.ONE)`` and ``format(AnIntEnum.ONE)`` return ``'1'``, - whereas before ``str(AnIntEnum.ONE)`` returned ``'AnIntEnum.ONE'``. - -* Changed :meth:`Enum.__format__() ` - (the default for :func:`format`, :meth:`str.format` and :term:`f-string`\s) - of enums with mixed-in types (e.g. :class:`int`, :class:`str`) - to also include the class name in the output, not just the member's key. - This matches the existing behavior of :meth:`enum.Enum.__str__`, - returning e.g. ``'AnEnum.MEMBER'`` for an enum ``AnEnum(str, Enum)`` - instead of just ``'MEMBER'``. +* Changed :meth:`Enum.__format__() ` (the default for + :func:`format`, :meth:`str.format` and :term:`f-string`\s) to always produce + the same result as :meth:`Enum.__str__()`: for enums inheriting from + :class:`~enum.ReprEnum` it will be the member's value; for all other enums + it will be the enum and member name (e.g. ``Color.RED``). 
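A small illustration of the ``__format__``/``__str__`` alignment described in the item just above; the enum classes are made up for the example and the results are the documented 3.11 behaviour::

    from enum import Enum, IntEnum

    class Color(Enum):        # a plain Enum
        RED = 1

    class Answer(IntEnum):    # IntEnum inherits from ReprEnum
        YES = 1

    # Plain enums: format() now matches str() and gives the enum and member name.
    assert str(Color.RED) == format(Color.RED) == 'Color.RED'

    # ReprEnum-based enums (IntEnum, IntFlag, StrEnum): both give the value.
    assert str(Answer.YES) == format(Answer.YES) == '1'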
* Added a new *boundary* class parameter to :class:`~enum.Flag` enums and the :class:`~enum.FlagBoundary` enum with its options, @@ -1880,28 +1872,28 @@ C APIs pending removal are * The :mod:`asynchat` module * The :mod:`asyncore` module * The :ref:`entire distutils package ` -* The :mod:`imp` module +* The :mod:`!imp` module * The :class:`typing.io ` namespace * The :class:`typing.re ` namespace * :func:`!cgi.log` -* :func:`importlib.find_loader` -* :meth:`importlib.abc.Loader.module_repr` -* :meth:`importlib.abc.MetaPathFinder.find_module` -* :meth:`importlib.abc.PathEntryFinder.find_loader` -* :meth:`importlib.abc.PathEntryFinder.find_module` +* :func:`!importlib.find_loader` +* :meth:`!importlib.abc.Loader.module_repr` +* :meth:`!importlib.abc.MetaPathFinder.find_module` +* :meth:`!importlib.abc.PathEntryFinder.find_loader` +* :meth:`!importlib.abc.PathEntryFinder.find_module` * :meth:`!importlib.machinery.BuiltinImporter.find_module` * :meth:`!importlib.machinery.BuiltinLoader.module_repr` * :meth:`!importlib.machinery.FileFinder.find_loader` * :meth:`!importlib.machinery.FileFinder.find_module` * :meth:`!importlib.machinery.FrozenImporter.find_module` * :meth:`!importlib.machinery.FrozenLoader.module_repr` -* :meth:`importlib.machinery.PathFinder.find_module` +* :meth:`!importlib.machinery.PathFinder.find_module` * :meth:`!importlib.machinery.WindowsRegistryFinder.find_module` -* :func:`importlib.util.module_for_loader` +* :func:`!importlib.util.module_for_loader` * :func:`!importlib.util.set_loader_wrapper` * :func:`!importlib.util.set_package_wrapper` -* :class:`pkgutil.ImpImporter` -* :class:`pkgutil.ImpLoader` +* :class:`!pkgutil.ImpImporter` +* :class:`!pkgutil.ImpLoader` * :meth:`pathlib.Path.link_to` * :func:`!sqlite3.enable_shared_cache` * :func:`!sqlite3.OptimizedUnicode` @@ -2489,7 +2481,7 @@ Porting to Python 3.11 #endif Or use the `pythoncapi_compat project - `__ to get these two + `__ to get these two functions on older Python versions. * Changes of the :c:type:`PyThreadState` structure members: @@ -2541,8 +2533,8 @@ Porting to Python 3.11 } #endif - Or use `the pythoncapi_compat project - `__ to get these functions + Or use `the pythoncapi-compat project + `__ to get these functions on old Python functions. * Distributors are encouraged to build Python with the optimized Blake2 diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index f4ee30b0d4d9eb..eb13d4bf031c95 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -68,13 +68,17 @@ Summary -- Release highlights New typing features: +* :pep:`688`: Making the buffer protocol accessible in Python + * :ref:`whatsnew312-pep692` +* :pep:`698`: Override Decorator for Static Typing + Important deprecations, removals or restrictions: -* :pep:`623`, Remove wstr from Unicode +* :pep:`623`: Remove wstr from Unicode -* :pep:`632`, Remove the ``distutils`` package. +* :pep:`632`: Remove the ``distutils`` package Improved Error Messages ======================= @@ -149,6 +153,43 @@ New Features In Python 3.14, the default will switch to ``'data'``. (Contributed by Petr Viktorin in :pep:`706`.) +.. _whatsnew312-pep709: + +PEP 709: Comprehension inlining +------------------------------- + +Dictionary, list, and set comprehensions are now inlined, rather than creating a +new single-use function object for each execution of the comprehension. This +speeds up execution of a comprehension by up to 2x. 
+ +Comprehension iteration variables remain isolated; they don't overwrite a +variable of the same name in the outer scope, nor are they visible after the +comprehension. This isolation is now maintained via stack/locals manipulation, +not via separate function scope. + +Inlining does result in a few visible behavior changes: + +* There is no longer a separate frame for the comprehension in tracebacks, + and tracing/profiling no longer shows the comprehension as a function call. +* Calling :func:`locals` inside a comprehension now includes variables + from outside the comprehension, and no longer includes the synthetic ``.0`` + variable for the comprehension "argument". + +Contributed by Carl Meyer and Vladimir Matveev in :pep:`709`. + +PEP 688: Making the buffer protocol accessible in Python +-------------------------------------------------------- + +:pep:`688` introduces a way to use the :ref:`buffer protocol ` +from Python code. Classes that implement the :meth:`~object.__buffer__` method +are now usable as buffer types. + +The new :class:`collections.abc.Buffer` ABC provides a standard +way to represent buffer objects, for example in type annotations. +The new :class:`inspect.BufferFlags` enum represents the flags that +can be used to customize buffer creation. +(Contributed by Jelle Zijlstra in :gh:`102500`.) + New Features Related to Type Hints ================================== @@ -179,7 +220,6 @@ See :pep:`692` for more details. (PEP written by Franek Magiera) - Other Language Changes ====================== @@ -239,7 +279,8 @@ Other Language Changes * When a ``try-except*`` construct handles the entire :exc:`ExceptionGroup` and raises one other exception, that exception is no longer wrapped in an - :exc:`ExceptionGroup`. (Contributed by Irit Katriel in :gh:`103590`.) + :exc:`ExceptionGroup`. Also changed in version 3.11.4. (Contributed by Irit + Katriel in :gh:`103590`.) New Modules @@ -265,6 +306,11 @@ asyncio writing to sockets and uses :meth:`~socket.socket.sendmsg` if the platform supports it. (Contributed by Kumar Aditya in :gh:`91166`.) +* Added :func:`asyncio.eager_task_factory` and :func:`asyncio.create_eager_task_factory` + functions to allow opting an event loop in to eager task execution, + making some use-cases 2x to 5x faster. + (Contributed by Jacob Bower & Itamar O in :gh:`102853`, :gh:`104140`, and :gh:`104138`) + * On Linux, :mod:`asyncio` uses :class:`~asyncio.PidfdChildWatcher` by default if :func:`os.pidfd_open` is available and functional instead of :class:`~asyncio.ThreadedChildWatcher`. @@ -325,12 +371,18 @@ inspect (Contributed by Thomas Krennwallner in :issue:`35759`.) * The performance of :func:`inspect.getattr_static` has been considerably - improved. Most calls to the function should be around 2x faster than they - were in Python 3.11. (Contributed by Alex Waygood in :gh:`103193`.) + improved. Most calls to the function should be at least 2x faster than they + were in Python 3.11, and some may be 6x faster or more. (Contributed by Alex + Waygood in :gh:`103193`.) pathlib ------- +* Add support for subclassing :class:`pathlib.PurePath` and + :class:`~pathlib.Path`, plus their Posix- and Windows-specific variants. + Subclasses may override the :meth:`~pathlib.PurePath.with_segments` method + to pass information between path instances. + * Add :meth:`~pathlib.Path.walk` for walking the directory trees and generating all file or directory names within them, similar to :func:`os.walk`. (Contributed by Stanislav Zmiev in :gh:`90385`.) 
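As a minimal sketch of the PEP 688 hook described above, here is a pure-Python class exposing its data through ``__buffer__``; the ``PixelRow`` class is invented for the example, and the behaviour shown is as documented for 3.12::

    import inspect
    from collections.abc import Buffer

    class PixelRow:
        """Expose an internal bytearray via the buffer protocol."""

        def __init__(self, data):
            self._data = bytearray(data)

        def __buffer__(self, flags):
            # flags is an inspect.BufferFlags combination, e.g. BufferFlags.SIMPLE.
            return memoryview(self._data)

    row = PixelRow(b"\x00\x01\x02")
    assert isinstance(row, Buffer)                    # recognised via the new ABC
    assert bytes(memoryview(row)) == b"\x00\x01\x02"  # usable wherever a buffer is expected
    print(inspect.BufferFlags.SIMPLE)                 # the new flags enum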
@@ -408,6 +460,14 @@ os.path * Add :func:`os.path.splitroot` to split a path into a triad ``(drive, root, tail)``. (Contributed by Barney Gale in :gh:`101000`.) +pdb +--- + +* Add convenience variables to hold values temporarily for debug session + and provide quick access to values like the current frame or the return + value. + (Contributed by Tian Gao in :gh:`103693`.) + shutil ------ @@ -567,7 +627,7 @@ typing :func:`runtime-checkable protocols ` has changed significantly. Most ``isinstance()`` checks against protocols with only a few members should be at least 2x faster than in 3.11, and some may be 20x - faster or more. However, ``isinstance()`` checks against protocols with seven + faster or more. However, ``isinstance()`` checks against protocols with fourteen or more members may be slower than in Python 3.11. (Contributed by Alex Waygood in :gh:`74690` and :gh:`103193`.) @@ -705,6 +765,12 @@ Deprecated replaced by :data:`calendar.Month.JANUARY` and :data:`calendar.Month.FEBRUARY`. (Contributed by Prince Roshan in :gh:`103636`.) +* The bitwise inversion operator (``~``) on bool is deprecated. It will throw an + error in Python 3.14. Use ``not`` for logical negation of bools instead. + In the rare case that you really need the bitwise inversion of the underlying + ``int``, convert to int explicitly with ``~int(x)``. (Contributed by Tim Hoffmann + in :gh:`103487`.) + Pending Removal in Python 3.13 ------------------------------ @@ -760,6 +826,11 @@ Pending Removal in Python 3.14 (Contributed by Jason R. Coombs and Hugo van Kemenade in :gh:`93963`.) +* Deprecated :class:`collections.abc.ByteString`. + Prefer :class:`Sequence` or :class:`collections.abc.Buffer`. + For use in typing, prefer a union, like ``bytes | bytearray``, or :class:`collections.abc.Buffer`. + (Contributed by Shantanu Jain in :gh:`91896`.) + * Creating immutable types (:data:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable bases using the C API. @@ -793,6 +864,24 @@ Pending Removal in Python 3.14 * The *onerror* argument of :func:`shutil.rmtree` is deprecated in 3.12, and will be removed in 3.14. +* :func:`pkgutil.find_loader` and :func:`pkgutil.get_loader` + now raise :exc:`DeprecationWarning`; + use :func:`importlib.util.find_spec` instead. + (Contributed by Nikita Sobolev in :gh:`97850`.) + +* The following :mod:`ast` features have been deprecated in documentation since + Python 3.8, now cause a :exc:`DeprecationWarning` to be emitted at runtime + when they are accessed or used, and will be removed in Python 3.14: + + * :class:`!ast.Num` + * :class:`!ast.Str` + * :class:`!ast.Bytes` + * :class:`!ast.NameConstant` + * :class:`!ast.Ellipsis` + + Use :class:`ast.Constant` instead. + (Contributed by Serhiy Storchaka in :gh:`90953`.) + Pending Removal in Future Versions ---------------------------------- @@ -972,14 +1061,20 @@ Removed * Many previously deprecated cleanups in :mod:`importlib` have now been completed: - * References to, and support for ``module_repr()`` has been eradicated. + * References to, and support for :meth:`!module_repr()` has been removed. (Contributed by Barry Warsaw in :gh:`97850`.) -* ``importlib.util.set_package`` has been removed. - (Contributed by Brett Cannon in :gh:`65961`.) + * ``importlib.util.set_package`` has been removed. (Contributed by Brett + Cannon in :gh:`65961`.) + + * Support for ``find_loader()`` and ``find_module()`` APIs have been + removed. (Contributed by Barry Warsaw in :gh:`98040`.) 
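To make the ``(drive, root, tail)`` triad from the ``os.path.splitroot()`` entry above concrete, a quick sketch using the platform-specific modules so the results are the same on any system (values as documented)::

    import ntpath
    import posixpath

    # POSIX paths: no drive, the root is '/', the rest is the tail.
    assert posixpath.splitroot('/usr/local/bin') == ('', '/', 'usr/local/bin')

    # Windows paths: the drive letter is split out separately from the root.
    assert ntpath.splitroot('C:/Users/guido') == ('C:', '/', 'Users/guido')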
+ + * ``importlib.abc.Finder``, ``pkg.ImpImporter``, and ``pkg.ImpLoader`` have + been removed. (Contributed by Barry Warsaw in :gh:`98040`.) -* The ``imp`` module has been removed. (Contributed by Barry Warsaw in - :gh:`98040`.) + * The :mod:`!imp` module has been removed. (Contributed by Barry Warsaw in + :gh:`98040`.) * Removed the ``suspicious`` rule from the documentation Makefile, and removed ``Doc/tools/rstlint.py``, both in favor of `sphinx-lint @@ -1098,6 +1193,14 @@ Build Changes optimization levels (0, 1, 2) at once. (Contributed by Victor Stinner in :gh:`99289`.) +* Add platform triplets for 64-bit LoongArch: + + * loongarch64-linux-gnusf + * loongarch64-linux-gnuf32 + * loongarch64-linux-gnu + + (Contributed by Zhang Na in :gh:`90656`.) + C API Changes ============= @@ -1128,6 +1231,21 @@ New Features (Contributed by Petr Viktorin in :gh:`101101`.) +* :pep:`697`: Added API for extending types whose instance memory layout is + opaque: + + - :c:member:`PyType_Spec.basicsize` can be zero or negative to specify + inheriting or extending the base class size. + - :c:func:`PyObject_GetTypeData` and :c:func:`PyType_GetTypeDataSize` + added to allow access to subclass-specific instance data. + - :const:`Py_TPFLAGS_ITEMS_AT_END` and :c:func:`PyObject_GetItemData` + added to allow safely extending certain variable-sized types, including + :c:var:`PyType_Type`. + - :c:macro:`Py_RELATIVE_OFFSET` added to allow defining + :c:type:`members ` in terms of a subclass-specific struct. + + (Contributed by Petr Viktorin in :gh:`103509`.) + * Added the new limited C API function :c:func:`PyType_FromMetaclass`, which generalizes the existing :c:func:`PyType_FromModuleAndSpec` using an additional metaclass argument. @@ -1303,6 +1421,21 @@ Porting to Python 3.12 available on debug builds. If you happen to be using it then you'll need to start using ``_Py_GetGlobalRefTotal()``. +* The following functions now select an appropriate metaclass for the newly + created type: + + * :c:func:`PyType_FromSpec` + * :c:func:`PyType_FromSpecWithBases` + * :c:func:`PyType_FromModuleAndSpec` + + Creating classes whose metaclass overrides :c:member:`~PyTypeObject.tp_new` + is deprecated, and in Python 3.14+ it will be disallowed. + Note that these functions ignore ``tp_new`` of the metaclass, possibly + allowing incomplete initialization. + + Note that :c:func:`PyType_FromMetaclass` (added in Python 3.12) + already disallows creating classes whose metaclass overrides ``tp_new``. + Deprecated ---------- @@ -1379,6 +1512,11 @@ Deprecated * ``_PyErr_ChainExceptions`` is deprecated. Use ``_PyErr_ChainExceptions1`` instead. (Contributed by Irit Katriel in :gh:`102192`.) +* Using :c:func:`PyType_FromSpec`, :c:func:`PyType_FromSpecWithBases` + or :c:func:`PyType_FromModuleAndSpec` to create a class whose metaclass + overrides :c:member:`~PyTypeObject.tp_new` is deprecated. + Call the metaclass instead. 
+ Removed ------- diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst index 1b1455b72b9291..7af0c0288376fd 100644 --- a/Doc/whatsnew/3.2.rst +++ b/Doc/whatsnew/3.2.rst @@ -319,7 +319,7 @@ aspects that are visible to the programmer: >>> collections.__cached__ # doctest: +SKIP 'c:/py32/lib/__pycache__/collections.cpython-32.pyc' -* The tag that is unique to each interpreter is accessible from the :mod:`imp` +* The tag that is unique to each interpreter is accessible from the :mod:`!imp` module: >>> import imp # doctest: +SKIP @@ -328,7 +328,7 @@ aspects that are visible to the programmer: * Scripts that try to deduce source filename from the imported file now need to be smarter. It is no longer sufficient to simply strip the "c" from a ".pyc" - filename. Instead, use the new functions in the :mod:`imp` module: + filename. Instead, use the new functions in the :mod:`!imp` module: >>> imp.source_from_cache('c:/py32/lib/__pycache__/collections.cpython-32.pyc') # doctest: +SKIP 'c:/py32/lib/collections.py' @@ -785,8 +785,8 @@ functools (Contributed by Raymond Hettinger and incorporating design ideas from Jim Baker, Miki Tebeka, and Nick Coghlan; see `recipe 498245 - `_\, `recipe 577479 - `_\, :issue:`10586`, and + `_\, `recipe 577479 + `_\, :issue:`10586`, and :issue:`10593`.) * The :func:`functools.wraps` decorator now adds a :attr:`__wrapped__` attribute @@ -2603,7 +2603,7 @@ Also, there were a number of updates to the Mac OS X build, see for details. For users running a 32/64-bit build, there is a known problem with the default Tcl/Tk on Mac OS X 10.6. Accordingly, we recommend installing an updated alternative such as -`ActiveState Tcl/Tk 8.5.9 `_\. +`ActiveState Tcl/Tk 8.5.9 `_\. See https://www.python.org/download/mac/tcltk/ for additional details. Porting to Python 3.2 diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index 9e8d42469b019c..f121652ba51cbc 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -685,7 +685,7 @@ through normal attribute access. Using importlib as the Implementation of Import =============================================== :issue:`2377` - Replace __import__ w/ importlib.__import__ -:issue:`13959` - Re-implement parts of :mod:`imp` in pure Python +:issue:`13959` - Re-implement parts of :mod:`!imp` in pure Python :issue:`14605` - Make import machinery explicit :issue:`14646` - Require loaders set __loader__ and __package__ @@ -714,7 +714,7 @@ to properly delineate between :term:`meta path finders ` and :term:`path entry finders ` by introducing :class:`importlib.abc.MetaPathFinder` and :class:`importlib.abc.PathEntryFinder`, respectively. The old ABC of -:class:`importlib.abc.Finder` is now only provided for backwards-compatibility +:class:`!importlib.abc.Finder` is now only provided for backwards-compatibility and does not enforce any method requirements. In terms of finders, :class:`importlib.machinery.FileFinder` exposes the @@ -762,7 +762,7 @@ Loaders are also now expected to set the ``__package__`` attribute from from :mod:`importlib` and import itself is setting the attribute post-load. ``None`` is now inserted into :attr:`sys.path_importer_cache` when no finder -can be found on :attr:`sys.path_hooks`. Since :class:`imp.NullImporter` is not +can be found on :attr:`sys.path_hooks`. Since :class:`!imp.NullImporter` is not directly exposed on :attr:`sys.path_hooks` it could no longer be relied upon to always be available to use as a value representing no finder found. 
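A rough sketch of the delineation described above: meta path finders are registered on ``sys.meta_path``, while path entry finders are produced by callables on ``sys.path_hooks``. Both objects below are stand-ins invented for the example::

    import sys
    import importlib.abc

    class NullMetaFinder(importlib.abc.MetaPathFinder):
        """Consulted for every import, before sys.path is searched."""

        def find_spec(self, fullname, path, target=None):
            return None      # decline, so the normal machinery continues

    def null_path_hook(entry):
        """Called with each sys.path entry to produce a path entry finder."""
        # Raising ImportError declines the entry; if every hook declines,
        # None is cached in sys.path_importer_cache for that entry.
        raise ImportError

    sys.meta_path.append(NullMetaFinder())
    sys.path_hooks.append(null_path_hook)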
@@ -1893,7 +1893,7 @@ socket * The :class:`~socket.socket` class now supports the PF_RDS protocol family (https://en.wikipedia.org/wiki/Reliable_Datagram_Sockets and - https://oss.oracle.com/projects/rds/). + `https://oss.oracle.com/projects/rds `__). * The :class:`~socket.socket` class now supports the ``PF_SYSTEM`` protocol family on OS X. (Contributed by Michael Goderbauer in :issue:`13777`.) @@ -2385,12 +2385,12 @@ Porting Python code * Because ``None`` is now inserted into :attr:`sys.path_importer_cache`, if you are clearing out entries in the dictionary of paths that do not have a finder, you will need to remove keys paired with values of ``None`` **and** - :class:`imp.NullImporter` to be backwards-compatible. This will lead to extra + :class:`!imp.NullImporter` to be backwards-compatible. This will lead to extra overhead on older versions of Python that re-insert ``None`` into :attr:`sys.path_importer_cache` where it represents the use of implicit finders, but semantically it should not change anything. -* :class:`importlib.abc.Finder` no longer specifies a ``find_module()`` abstract +* :class:`!importlib.abc.Finder` no longer specifies a ``find_module()`` abstract method that must be implemented. If you were relying on subclasses to implement that method, make sure to check for the method's existence first. You will probably want to check for ``find_loader()`` first, though, in the diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index b7bb505a818482..45bb91833a352b 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -991,18 +991,18 @@ for the :meth:`~importlib.abc.InspectLoader.get_code` method. However, it will normally be desirable to override the default implementation for performance reasons. (Contributed by Brett Cannon in :issue:`18072`.) -The :func:`~importlib.reload` function has been moved from :mod:`imp` to -:mod:`importlib` as part of the :mod:`imp` module deprecation. (Contributed by +The :func:`~importlib.reload` function has been moved from :mod:`!imp` to +:mod:`importlib` as part of the :mod:`!imp` module deprecation. (Contributed by Berker Peksag in :issue:`18193`.) :mod:`importlib.util` now has a :data:`~importlib.util.MAGIC_NUMBER` attribute providing access to the bytecode version number. This replaces the -:func:`~imp.get_magic` function in the deprecated :mod:`imp` module. +:func:`!get_magic` function in the deprecated :mod:`!imp` module. (Contributed by Brett Cannon in :issue:`18192`.) New :mod:`importlib.util` functions :func:`~importlib.util.cache_from_source` and :func:`~importlib.util.source_from_cache` replace the same-named functions -in the deprecated :mod:`imp` module. (Contributed by Brett Cannon in +in the deprecated :mod:`!imp` module. (Contributed by Brett Cannon in :issue:`18194`.) 
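A quick before/after sketch of the replacements listed above; the deprecated ``imp`` calls are shown only in comments::

    import importlib
    import importlib.util
    import types

    importlib.reload(types)                             # was: imp.reload(types)
    magic = importlib.util.MAGIC_NUMBER                 # was: imp.get_magic()
    pyc = importlib.util.cache_from_source('spam.py')   # was: imp.cache_from_source('spam.py')
    src = importlib.util.source_from_cache(pyc)         # was: imp.source_from_cache(pyc)
    print(magic, pyc, src)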
The :mod:`importlib` bootstrap :class:`.NamespaceLoader` now conforms to @@ -2077,31 +2077,31 @@ Deprecations in the Python API ------------------------------ * As mentioned in :ref:`whatsnew-pep-451`, a number of :mod:`importlib` - methods and functions are deprecated: :meth:`importlib.find_loader` is + methods and functions are deprecated: :meth:`!importlib.find_loader` is replaced by :func:`importlib.util.find_spec`; - :meth:`importlib.machinery.PathFinder.find_module` is replaced by + :meth:`!importlib.machinery.PathFinder.find_module` is replaced by :meth:`importlib.machinery.PathFinder.find_spec`; - :meth:`importlib.abc.MetaPathFinder.find_module` is replaced by + :meth:`!importlib.abc.MetaPathFinder.find_module` is replaced by :meth:`importlib.abc.MetaPathFinder.find_spec`; - :meth:`importlib.abc.PathEntryFinder.find_loader` and - :meth:`~importlib.abc.PathEntryFinder.find_module` are replaced by + :meth:`!importlib.abc.PathEntryFinder.find_loader` and + :meth:`!find_module` are replaced by :meth:`importlib.abc.PathEntryFinder.find_spec`; all of the ``xxxLoader`` ABC - ``load_module`` methods (:meth:`importlib.abc.Loader.load_module`, - :meth:`importlib.abc.InspectLoader.load_module`, - :meth:`importlib.abc.FileLoader.load_module`, - :meth:`importlib.abc.SourceLoader.load_module`) should no longer be + ``load_module`` methods (:meth:`!importlib.abc.Loader.load_module`, + :meth:`!importlib.abc.InspectLoader.load_module`, + :meth:`!importlib.abc.FileLoader.load_module`, + :meth:`!importlib.abc.SourceLoader.load_module`) should no longer be implemented, instead loaders should implement an ``exec_module`` method (:meth:`importlib.abc.Loader.exec_module`, :meth:`importlib.abc.InspectLoader.exec_module` :meth:`importlib.abc.SourceLoader.exec_module`) and let the import system take care of the rest; and - :meth:`importlib.abc.Loader.module_repr`, - :meth:`importlib.util.module_for_loader`, :meth:`importlib.util.set_loader`, - and :meth:`importlib.util.set_package` are no longer needed because their + :meth:`!importlib.abc.Loader.module_repr`, + :meth:`!importlib.util.module_for_loader`, :meth:`!importlib.util.set_loader`, + and :meth:`!importlib.util.set_package` are no longer needed because their functions are now handled automatically by the import system. -* The :mod:`imp` module is pending deprecation. To keep compatibility with +* The :mod:`!imp` module is pending deprecation. To keep compatibility with Python 2/3 code bases, the module's removal is currently not scheduled. * The :mod:`formatter` module is pending deprecation and is slated for removal @@ -2277,7 +2277,7 @@ Changes in the Python API in a backwards-compatible fashion, use e.g. ``getattr(module, '__loader__', None) is not None``. (:issue:`17115`.) -* :meth:`importlib.util.module_for_loader` now sets ``__loader__`` and +* :meth:`!importlib.util.module_for_loader` now sets ``__loader__`` and ``__package__`` unconditionally to properly support reloading. If this is not desired then you will need to set these attributes manually. You can use :func:`importlib.util.module_to_load` for module management. @@ -2300,7 +2300,7 @@ Changes in the Python API then you can see if the module's ``__spec__.location`` is set to ``'frozen'``, check if the loader is a subclass of :class:`importlib.machinery.FrozenImporter`, - or if Python 2 compatibility is necessary you can use :func:`imp.is_frozen`. + or if Python 2 compatibility is necessary you can use :func:`!imp.is_frozen`. 
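A minimal sketch of the ``exec_module``-based loader pattern that the deprecation list above recommends over ``load_module``; the ``StringLoader`` class is invented for the example, and ``module_from_spec()`` used to drive it was added later, in 3.5::

    import importlib.abc
    import importlib.util

    class StringLoader(importlib.abc.Loader):
        """Load a module from a source string via exec_module()."""

        def __init__(self, source):
            self.source = source

        def create_module(self, spec):
            return None      # use the default module creation

        def exec_module(self, module):
            exec(compile(self.source, module.__name__, 'exec'), module.__dict__)

    spec = importlib.util.spec_from_loader('demo', StringLoader('ANSWER = 42'))
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    assert module.ANSWER == 42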
* :func:`py_compile.compile` now raises :exc:`FileExistsError` if the file path it would write to is a symlink or a non-regular file. This is to act as a diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst index f872579ef546f5..14b6425cea699e 100644 --- a/Doc/whatsnew/3.5.rst +++ b/Doc/whatsnew/3.5.rst @@ -425,7 +425,7 @@ are declared in the annotations:: While these annotations are available at runtime through the usual :attr:`__annotations__` attribute, *no automatic type checking happens at runtime*. Instead, it is assumed that a separate off-line type checker -(e.g. `mypy `_) will be used for on-demand +(e.g. `mypy `_) will be used for on-demand source code analysis. The type system supports unions, generic types, and a special type @@ -2212,7 +2212,7 @@ for details.) The :c:member:`PyTypeObject.tp_finalize` slot is now part of the stable ABI. Windows builds now require Microsoft Visual C++ 14.0, which -is available as part of `Visual Studio 2015 `_. +is available as part of `Visual Studio 2015 `_. Extension modules now include a platform information tag in their filename on some platforms (the tag is optional, and CPython will import extensions without diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst index e4294c88b58572..3a681754e25dd7 100644 --- a/Doc/whatsnew/3.6.rst +++ b/Doc/whatsnew/3.6.rst @@ -238,7 +238,7 @@ and the ``__annotations__`` attribute. and Guido van Rossum. Implemented by Ivan Levkivskyi. Tools that use or will use the new syntax: - `mypy `_, + `mypy `_, `pytype `_, PyCharm, etc. @@ -2180,7 +2180,7 @@ Changes in the Python API now raises :exc:`ValueError` for out-of-range values, rather than returning :const:`None`. See :issue:`20059`. -* The :mod:`imp` module now raises a :exc:`DeprecationWarning` instead of +* The :mod:`!imp` module now raises a :exc:`DeprecationWarning` instead of :exc:`PendingDeprecationWarning`. * The following modules have had missing APIs added to their :attr:`__all__` diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst index df3b636cb9ec46..28f22836d8d09e 100644 --- a/Doc/whatsnew/3.7.rst +++ b/Doc/whatsnew/3.7.rst @@ -2004,11 +2004,11 @@ importlib --------- Methods -:meth:`MetaPathFinder.find_module() ` +:meth:`MetaPathFinder.find_module() ` (replaced by :meth:`MetaPathFinder.find_spec() `) and -:meth:`PathEntryFinder.find_loader() ` +:meth:`PathEntryFinder.find_loader() ` (replaced by :meth:`PathEntryFinder.find_spec() `) both deprecated in Python 3.4 now emit :exc:`DeprecationWarning`. diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 37a6cf24e54562..85e088b64acb2d 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -2229,7 +2229,7 @@ The benchmarks were measured on an `Intel® Core™ i7-4960HQ processor `_ running the macOS 64-bit builds found at -`python.org `_. +`python.org `_. The benchmark script displays timings in nanoseconds. diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index e974ee3a3f73ed..fd86db96302356 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -849,7 +849,7 @@ in nanoseconds. The benchmarks were measured on an `Intel® Core™ i7-4960HQ processor `_ running the macOS 64-bit builds found at -`python.org `_. +`python.org `_. 
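Tying back to the 3.5 annotations note earlier in this section, a tiny sketch showing that annotations are stored on the function but not enforced at runtime; an off-line checker such as mypy would flag the last call::

    def greet(name: str) -> str:
        return 'Hello, {}'.format(name)

    print(greet.__annotations__)   # {'name': <class 'str'>, 'return': <class 'str'>}
    print(greet('world'))          # fine
    print(greet(42))               # also runs: no type checking happens at runtime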
Deprecated diff --git a/Include/cpython/frameobject.h b/Include/cpython/frameobject.h index 4e19535c656f2c..6f3efe36ede5d8 100644 --- a/Include/cpython/frameobject.h +++ b/Include/cpython/frameobject.h @@ -4,6 +4,8 @@ # error "this header file must not be included directly" #endif +struct _PyInterpreterFrame; + /* Standard object interface */ PyAPI_FUNC(PyFrameObject *) PyFrame_New(PyThreadState *, PyCodeObject *, @@ -27,3 +29,18 @@ PyAPI_FUNC(int) _PyFrame_IsEntryFrame(PyFrameObject *frame); PyAPI_FUNC(int) PyFrame_FastToLocalsWithError(PyFrameObject *f); PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *); + +/* The following functions are for use by debuggers and other tools + * implementing custom frame evaluators with PEP 523. */ + +/* Returns the code object of the frame (strong reference). + * Does not raise an exception. */ +PyAPI_FUNC(PyCodeObject *) PyUnstable_InterpreterFrame_GetCode(struct _PyInterpreterFrame *frame); + +/* Returns a byte ofsset into the last executed instruction. + * Does not raise an exception. */ +PyAPI_FUNC(int) PyUnstable_InterpreterFrame_GetLasti(struct _PyInterpreterFrame *frame); + +/* Returns the currently executing line number, or -1 if there is no line number. + * Does not raise an exception. */ +PyAPI_FUNC(int) PyUnstable_InterpreterFrame_GetLine(struct _PyInterpreterFrame *frame); diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index 79c1023baa9a0f..efae2409b50069 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -252,7 +252,8 @@ typedef struct { int allow_threads; int allow_daemon_threads; int check_multi_interp_extensions; -} _PyInterpreterConfig; + int own_gil; +} PyInterpreterConfig; #define _PyInterpreterConfig_INIT \ { \ @@ -262,6 +263,7 @@ typedef struct { .allow_threads = 1, \ .allow_daemon_threads = 0, \ .check_multi_interp_extensions = 1, \ + .own_gil = 1, \ } #define _PyInterpreterConfig_LEGACY_INIT \ @@ -272,6 +274,7 @@ typedef struct { .allow_threads = 1, \ .allow_daemon_threads = 1, \ .check_multi_interp_extensions = 0, \ + .own_gil = 0, \ } /* --- Helper functions --------------------------------------- */ diff --git a/Include/cpython/memoryobject.h b/Include/cpython/memoryobject.h index deab3cc89f726e..3837fa8c6ab5aa 100644 --- a/Include/cpython/memoryobject.h +++ b/Include/cpython/memoryobject.h @@ -24,6 +24,7 @@ typedef struct { #define _Py_MEMORYVIEW_FORTRAN 0x004 /* Fortran contiguous layout */ #define _Py_MEMORYVIEW_SCALAR 0x008 /* scalar: ndim = 0 */ #define _Py_MEMORYVIEW_PIL 0x010 /* PIL-style layout */ +#define _Py_MEMORYVIEW_RESTRICTED 0x020 /* Disallow new references to the memoryview's buffer */ typedef struct { PyObject_VAR_HEAD diff --git a/Include/cpython/object.h b/Include/cpython/object.h index ce4d13cd9c28fe..d8eff691039d24 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -553,6 +553,7 @@ Py_DEPRECATED(3.11) typedef int UsingDeprecatedTrashcanMacro; Py_TRASHCAN_END; \ } while(0); +PyAPI_FUNC(void *) PyObject_GetItemData(PyObject *obj); PyAPI_FUNC(int) _PyObject_VisitManagedDict(PyObject *obj, visitproc visit, void *arg); PyAPI_FUNC(void) _PyObject_ClearManagedDict(PyObject *obj); diff --git a/Include/cpython/objimpl.h b/Include/cpython/objimpl.h index 0b038d31080be9..5a8cdd57c7845b 100644 --- a/Include/cpython/objimpl.h +++ b/Include/cpython/objimpl.h @@ -90,3 +90,6 @@ PyAPI_FUNC(int) PyObject_IS_GC(PyObject *obj); PyAPI_FUNC(int) PyType_SUPPORTS_WEAKREFS(PyTypeObject *type); PyAPI_FUNC(PyObject **) 
PyObject_GET_WEAKREFS_LISTPTR(PyObject *op); + +PyAPI_FUNC(PyObject *) PyUnstable_Object_GC_NewWithExtraData(PyTypeObject *, + size_t); diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h index 79d55711319e55..08569ee683ce0d 100644 --- a/Include/cpython/pylifecycle.h +++ b/Include/cpython/pylifecycle.h @@ -62,9 +62,9 @@ PyAPI_FUNC(int) _Py_CoerceLegacyLocale(int warn); PyAPI_FUNC(int) _Py_LegacyLocaleDetected(int warn); PyAPI_FUNC(char *) _Py_SetLocaleFromEnv(int category); -PyAPI_FUNC(PyStatus) _Py_NewInterpreterFromConfig( +PyAPI_FUNC(PyStatus) Py_NewInterpreterFromConfig( PyThreadState **tstate_p, - const _PyInterpreterConfig *config); + const PyInterpreterConfig *config); typedef void (*atexit_datacallbackfunc)(void *); PyAPI_FUNC(int) _Py_AtExit( diff --git a/Include/descrobject.h b/Include/descrobject.h index 0a420b865dfd1b..fd66d17b497a31 100644 --- a/Include/descrobject.h +++ b/Include/descrobject.h @@ -83,6 +83,7 @@ struct PyMemberDef { #define Py_READONLY 1 #define Py_AUDIT_READ 2 // Added in 3.10, harmless no-op before that #define _Py_WRITE_RESTRICTED 4 // Deprecated, no-op. Do not reuse the value. +#define Py_RELATIVE_OFFSET 8 PyAPI_FUNC(PyObject *) PyMember_GetOne(const char *, PyMemberDef *); PyAPI_FUNC(int) PyMember_SetOne(char *, PyMemberDef *, PyObject *); diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index deda070a6dea79..3c8b368bd2af4e 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -21,8 +21,7 @@ struct _ceval_runtime_state; extern void _Py_FinishPendingCalls(PyThreadState *tstate); -extern void _PyEval_InitRuntimeState(struct _ceval_runtime_state *); -extern void _PyEval_InitState(struct _ceval_state *, PyThread_type_lock); +extern void _PyEval_InitState(PyInterpreterState *, PyThread_type_lock); extern void _PyEval_FiniState(struct _ceval_state *ceval); PyAPI_FUNC(void) _PyEval_SignalReceived(PyInterpreterState *interp); PyAPI_FUNC(int) _PyEval_AddPendingCall( @@ -96,11 +95,13 @@ _PyEval_Vector(PyThreadState *tstate, PyObject* const* args, size_t argcount, PyObject *kwnames); -extern int _PyEval_ThreadsInitialized(struct pyruntimestate *runtime); -extern PyStatus _PyEval_InitGIL(PyThreadState *tstate); +extern int _PyEval_ThreadsInitialized(void); +extern PyStatus _PyEval_InitGIL(PyThreadState *tstate, int own_gil); extern void _PyEval_FiniGIL(PyInterpreterState *interp); +extern void _PyEval_AcquireLock(PyThreadState *tstate); extern void _PyEval_ReleaseLock(PyThreadState *tstate); +extern PyThreadState * _PyThreadState_SwapNoGIL(PyThreadState *); extern void _PyEval_DeactivateOpCache(void); diff --git a/Include/internal/pycore_ceval_state.h b/Include/internal/pycore_ceval_state.h index 9ba42eb03b2676..b352801673c40a 100644 --- a/Include/internal/pycore_ceval_state.h +++ b/Include/internal/pycore_ceval_state.h @@ -49,7 +49,6 @@ struct _ceval_runtime_state { the main thread of the main interpreter can handle signals: see _Py_ThreadCanHandleSignals(). */ _Py_atomic_int signals_pending; - struct _gil_runtime_state gil; }; #ifdef PY_HAVE_PERF_TRAMPOLINE @@ -83,6 +82,8 @@ struct _pending_calls { struct _ceval_state { int recursion_limit; + struct _gil_runtime_state *gil; + int own_gil; /* This single variable consolidates all requests to break out of the fast path in the eval loop. 
*/ _Py_atomic_int eval_breaker; diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index 86fd48b63ef8e4..c1f017fdb753c5 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -131,6 +131,7 @@ struct callable_cache { // Note that these all fit within a byte, as do combinations. // Later, we will use the smaller numbers to differentiate the different // kinds of locals (e.g. pos-only arg, varkwargs, local-only). +#define CO_FAST_HIDDEN 0x10 #define CO_FAST_LOCAL 0x20 #define CO_FAST_CELL 0x40 #define CO_FAST_FREE 0x80 diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index 1a032f652dddaf..499f55f3e276be 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -70,6 +70,9 @@ typedef struct { PyObject *u_varnames; /* local variables */ PyObject *u_cellvars; /* cell variables */ PyObject *u_freevars; /* free variables */ + PyObject *u_fasthidden; /* dict; keys are names that are fast-locals only + temporarily within an inlined comprehension. When + value is True, treat as fast-local. */ Py_ssize_t u_argcount; /* number of arguments for block */ Py_ssize_t u_posonlyargcount; /* number of positional only arguments for block */ @@ -97,12 +100,17 @@ PyAPI_FUNC(PyObject*) _PyCompile_CodeGen( PyObject *ast, PyObject *filename, PyCompilerFlags *flags, - int optimize); + int optimize, + int compile_mode); PyAPI_FUNC(PyObject*) _PyCompile_OptimizeCfg( PyObject *instructions, PyObject *consts); +PyAPI_FUNC(PyCodeObject*) +_PyCompile_Assemble(_PyCompile_CodeUnitMetadata *umd, PyObject *filename, + PyObject *instructions); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_flowgraph.h b/Include/internal/pycore_flowgraph.h index 883334f4b182eb..720feb18636959 100644 --- a/Include/internal/pycore_flowgraph.h +++ b/Include/internal/pycore_flowgraph.h @@ -94,7 +94,7 @@ _PyCfgInstruction* _PyCfg_BasicblockLastInstr(const _PyCfgBasicblock *b); int _PyCfg_OptimizeCodeUnit(_PyCfgBuilder *g, PyObject *consts, PyObject *const_cache, int code_flags, int nlocals, int nparams, int firstlineno); int _PyCfg_Stackdepth(_PyCfgBasicblock *entryblock, int code_flags); -void _PyCfg_ConvertExceptionHandlersToNops(_PyCfgBasicblock *entryblock); +void _PyCfg_ConvertPseudoOps(_PyCfgBasicblock *entryblock); int _PyCfg_ResolveJumps(_PyCfgBuilder *g); diff --git a/Include/internal/pycore_format.h b/Include/internal/pycore_format.h index 1899609e77ef20..1b8d57539ca505 100644 --- a/Include/internal/pycore_format.h +++ b/Include/internal/pycore_format.h @@ -14,14 +14,12 @@ extern "C" { * F_BLANK ' ' * F_ALT '#' * F_ZERO '0' - * F_NO_NEG_0 'z' */ #define F_LJUST (1<<0) #define F_SIGN (1<<1) #define F_BLANK (1<<2) #define F_ALT (1<<3) #define F_ZERO (1<<4) -#define F_NO_NEG_0 (1<<5) #ifdef __cplusplus } diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index d8d7fe9ef2ebde..3d3cbbff7aae81 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -265,8 +265,6 @@ _PyFrame_PushUnchecked(PyThreadState *tstate, PyFunctionObject *func, int null_l return new_frame; } -int _PyInterpreterFrame_GetLine(_PyInterpreterFrame *frame); - static inline PyGenObject *_PyFrame_GetGenerator(_PyInterpreterFrame *frame) { diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index 4fa15d74b3ad64..7e495817981f06 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ 
b/Include/internal/pycore_global_objects_fini_generated.h @@ -593,6 +593,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__await__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bases__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bool__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__buffer__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__build_class__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__builtins__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__bytes__)); @@ -692,6 +693,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rdivmod__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reduce__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reduce_ex__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__release_buffer__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__repr__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__reversed__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__rfloordiv__)); @@ -845,6 +847,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(code)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(command)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(comment_factory)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(compile_mode)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(consts)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(context)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(cookie)); @@ -882,6 +885,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(dst_dir_fd)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(duration)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(e)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(eager_start)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(effective_ids)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(element_factory)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(encode)); @@ -972,6 +976,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(instructions)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(intern)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(intersection)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(is_running)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isatty)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isinstance)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(isoformat)); @@ -1027,6 +1032,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(memlimit)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(message)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(metaclass)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(metadata)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(method)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mod)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(mode)); @@ -1119,6 +1125,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reducer_override)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(registry)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(rel_tol)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(release)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(reload)); _PyStaticObject_CheckRefcnt((PyObject 
*)&_Py_ID(repl)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(replace)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index e19d8ff1b50468..8ebfee85c87c23 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -81,6 +81,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(__await__) STRUCT_FOR_ID(__bases__) STRUCT_FOR_ID(__bool__) + STRUCT_FOR_ID(__buffer__) STRUCT_FOR_ID(__build_class__) STRUCT_FOR_ID(__builtins__) STRUCT_FOR_ID(__bytes__) @@ -180,6 +181,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(__rdivmod__) STRUCT_FOR_ID(__reduce__) STRUCT_FOR_ID(__reduce_ex__) + STRUCT_FOR_ID(__release_buffer__) STRUCT_FOR_ID(__repr__) STRUCT_FOR_ID(__reversed__) STRUCT_FOR_ID(__rfloordiv__) @@ -333,6 +335,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(code) STRUCT_FOR_ID(command) STRUCT_FOR_ID(comment_factory) + STRUCT_FOR_ID(compile_mode) STRUCT_FOR_ID(consts) STRUCT_FOR_ID(context) STRUCT_FOR_ID(cookie) @@ -370,6 +373,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(dst_dir_fd) STRUCT_FOR_ID(duration) STRUCT_FOR_ID(e) + STRUCT_FOR_ID(eager_start) STRUCT_FOR_ID(effective_ids) STRUCT_FOR_ID(element_factory) STRUCT_FOR_ID(encode) @@ -460,6 +464,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(instructions) STRUCT_FOR_ID(intern) STRUCT_FOR_ID(intersection) + STRUCT_FOR_ID(is_running) STRUCT_FOR_ID(isatty) STRUCT_FOR_ID(isinstance) STRUCT_FOR_ID(isoformat) @@ -515,6 +520,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(memlimit) STRUCT_FOR_ID(message) STRUCT_FOR_ID(metaclass) + STRUCT_FOR_ID(metadata) STRUCT_FOR_ID(method) STRUCT_FOR_ID(mod) STRUCT_FOR_ID(mode) @@ -607,6 +613,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(reducer_override) STRUCT_FOR_ID(registry) STRUCT_FOR_ID(rel_tol) + STRUCT_FOR_ID(release) STRUCT_FOR_ID(reload) STRUCT_FOR_ID(repl) STRUCT_FOR_ID(replace) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 7276ce35ba68f0..527b2121148f4c 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -178,6 +178,9 @@ struct _is { basis. Also see _PyRuntimeState regarding the various mutex fields. */ + /* The per-interpreter GIL, which might not be used. 
*/ + struct _gil_runtime_state _gil; + /* the initial PyInterpreterState.threads.head */ PyThreadState _initial_thread; }; diff --git a/Include/internal/pycore_intrinsics.h b/Include/internal/pycore_intrinsics.h index 46a52740eb8a0c..3902059a04b9da 100644 --- a/Include/internal/pycore_intrinsics.h +++ b/Include/internal/pycore_intrinsics.h @@ -1,26 +1,24 @@ +// Auto-generated by Tools/build/generate_opcode_h.py from Lib/opcode.py /* Unary Functions: */ +#define INTRINSIC_1_INVALID 0 +#define INTRINSIC_PRINT 1 +#define INTRINSIC_IMPORT_STAR 2 +#define INTRINSIC_STOPITERATION_ERROR 3 +#define INTRINSIC_ASYNC_GEN_WRAP 4 +#define INTRINSIC_UNARY_POSITIVE 5 +#define INTRINSIC_LIST_TO_TUPLE 6 -#define INTRINSIC_PRINT 1 -#define INTRINSIC_IMPORT_STAR 2 -#define INTRINSIC_STOPITERATION_ERROR 3 -#define INTRINSIC_ASYNC_GEN_WRAP 4 -#define INTRINSIC_UNARY_POSITIVE 5 -#define INTRINSIC_LIST_TO_TUPLE 6 - -#define MAX_INTRINSIC_1 6 +#define MAX_INTRINSIC_1 6 /* Binary Functions: */ +#define INTRINSIC_2_INVALID 0 +#define INTRINSIC_PREP_RERAISE_STAR 1 -#define INTRINSIC_PREP_RERAISE_STAR 1 - -#define MAX_INTRINSIC_2 1 - +#define MAX_INTRINSIC_2 1 typedef PyObject *(*instrinsic_func1)(PyThreadState* tstate, PyObject *value); typedef PyObject *(*instrinsic_func2)(PyThreadState* tstate, PyObject *value1, PyObject *value2); - extern const instrinsic_func1 _PyIntrinsics_UnaryFunctions[]; extern const instrinsic_func2 _PyIntrinsics_BinaryFunctions[]; - diff --git a/Include/internal/pycore_memoryobject.h b/Include/internal/pycore_memoryobject.h new file mode 100644 index 00000000000000..fe19e3f9611a16 --- /dev/null +++ b/Include/internal/pycore_memoryobject.h @@ -0,0 +1,18 @@ +#ifndef Py_INTERNAL_MEMORYOBJECT_H +#define Py_INTERNAL_MEMORYOBJECT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +PyObject * +_PyMemoryView_FromBufferProc(PyObject *v, int flags, + getbufferproc bufferproc); + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_MEMORYOBJECT_H */ diff --git a/Include/internal/pycore_moduleobject.h b/Include/internal/pycore_moduleobject.h index 76361b8dff113a..15a1bcb6ae5163 100644 --- a/Include/internal/pycore_moduleobject.h +++ b/Include/internal/pycore_moduleobject.h @@ -36,6 +36,9 @@ static inline PyObject* _PyModule_GetDict(PyObject *mod) { return dict; } +PyObject* _Py_module_getattro_impl(PyModuleObject *m, PyObject *name, int suppress); +PyObject* _Py_module_getattro(PyModuleObject *m, PyObject *name); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index 2ca047846e0935..500b3eece68055 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -58,6 +58,9 @@ extern void _Py_DecRefTotal(PyInterpreterState *); // Increment reference count by n static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n) { + if (_Py_IsImmortal(op)) { + return; + } #ifdef Py_REF_DEBUG _Py_AddRefTotal(_PyInterpreterState_GET(), n); #endif @@ -272,8 +275,9 @@ _PyObject_GET_WEAKREFS_LISTPTR(PyObject *op) { if (PyType_Check(op) && ((PyTypeObject *)op)->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); static_builtin_state *state = _PyStaticType_GetState( - (PyTypeObject *)op); + interp, (PyTypeObject *)op); return _PyStaticType_GET_WEAKREFS_LISTPTR(state); } // Essentially _PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET(): @@ -330,10 +334,6 @@ extern int _Py_CheckSlotResult( const char *slot_name, 
int success); -// PyType_Ready() must be called if _PyType_IsReady() is false. -// See also the Py_TPFLAGS_READY flag. -#define _PyType_IsReady(type) ((type)->tp_dict != NULL) - // Test if a type supports weak references static inline int _PyType_SUPPORTS_WEAKREFS(PyTypeObject *type) { return (type->tp_weaklistoffset != 0); @@ -391,13 +391,6 @@ _PyDictOrValues_SetValues(PyDictOrValues *ptr, PyDictValues *values) extern PyObject ** _PyObject_ComputedDictPointer(PyObject *); extern void _PyObject_FreeInstanceAttributes(PyObject *obj); extern int _PyObject_IsInstanceDictEmpty(PyObject *); -extern int _PyType_HasSubclasses(PyTypeObject *); -extern PyObject* _PyType_GetSubclasses(PyTypeObject *); - -// Access macro to the members which are floating "behind" the object -static inline PyMemberDef* _PyHeapType_GET_MEMBERS(PyHeapTypeObject *etype) { - return (PyMemberDef*)((char*)etype + Py_TYPE(etype)->tp_basicsize); -} PyAPI_FUNC(PyObject *) _PyObject_LookupSpecial(PyObject *, PyObject *); diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h index 797bf08131f164..a4abbcc8be385e 100644 --- a/Include/internal/pycore_opcode.h +++ b/Include/internal/pycore_opcode.h @@ -174,6 +174,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [LOAD_CONST__LOAD_FAST] = LOAD_CONST, [LOAD_DEREF] = LOAD_DEREF, [LOAD_FAST] = LOAD_FAST, + [LOAD_FAST_AND_CLEAR] = LOAD_FAST_AND_CLEAR, [LOAD_FAST_CHECK] = LOAD_FAST_CHECK, [LOAD_FAST__LOAD_CONST] = LOAD_FAST, [LOAD_FAST__LOAD_FAST] = LOAD_FAST, @@ -240,7 +241,7 @@ const uint8_t _PyOpcode_Deopt[256] = { #endif // NEED_OPCODE_TABLES #ifdef Py_DEBUG -static const char *const _PyOpcode_OpName[266] = { +static const char *const _PyOpcode_OpName[267] = { [CACHE] = "CACHE", [POP_TOP] = "POP_TOP", [PUSH_NULL] = "PUSH_NULL", @@ -384,7 +385,7 @@ static const char *const _PyOpcode_OpName[266] = { [JUMP_BACKWARD] = "JUMP_BACKWARD", [LOAD_SUPER_ATTR] = "LOAD_SUPER_ATTR", [CALL_FUNCTION_EX] = "CALL_FUNCTION_EX", - [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", + [LOAD_FAST_AND_CLEAR] = "LOAD_FAST_AND_CLEAR", [EXTENDED_ARG] = "EXTENDED_ARG", [LIST_APPEND] = "LIST_APPEND", [SET_ADD] = "SET_ADD", @@ -394,21 +395,21 @@ static const char *const _PyOpcode_OpName[266] = { [YIELD_VALUE] = "YIELD_VALUE", [RESUME] = "RESUME", [MATCH_CLASS] = "MATCH_CLASS", + [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST", [STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST", - [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", [FORMAT_VALUE] = "FORMAT_VALUE", [BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP", [BUILD_STRING] = "BUILD_STRING", + [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT", [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST", [UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE", - [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", [LIST_EXTEND] = "LIST_EXTEND", [SET_UPDATE] = "SET_UPDATE", [DICT_MERGE] = "DICT_MERGE", [DICT_UPDATE] = "DICT_UPDATE", + [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", [SEND_GEN] = "SEND_GEN", - [167] = "<167>", [168] = "<168>", [169] = "<169>", [170] = "<170>", @@ -507,11 +508,11 @@ static const char *const _PyOpcode_OpName[266] = { [LOAD_SUPER_METHOD] = "LOAD_SUPER_METHOD", [LOAD_ZERO_SUPER_METHOD] = "LOAD_ZERO_SUPER_METHOD", [LOAD_ZERO_SUPER_ATTR] = "LOAD_ZERO_SUPER_ATTR", + [STORE_FAST_MAYBE_NULL] = "STORE_FAST_MAYBE_NULL", }; #endif #define EXTRA_CASES \ - case 167: \ case 168: \ case 169: \ case 170: \ diff --git a/Include/internal/pycore_pylifecycle.h 
b/Include/internal/pycore_pylifecycle.h index f96261a650dac7..7f8cc643ec0c96 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -39,7 +39,7 @@ extern PyStatus _PySys_Create( extern PyStatus _PySys_ReadPreinitWarnOptions(PyWideStringList *options); extern PyStatus _PySys_ReadPreinitXOptions(PyConfig *config); extern int _PySys_UpdateConfig(PyThreadState *tstate); -extern void _PySys_Fini(PyInterpreterState *interp); +extern void _PySys_FiniTypes(PyInterpreterState *interp); extern int _PyBuiltins_AddExceptions(PyObject * bltinmod); extern PyStatus _Py_HashRandomization_Init(const PyConfig *); diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 180ea676bc22eb..daa40cf4bcd855 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -68,7 +68,7 @@ _Py_ThreadCanHandlePendingCalls(void) } -/* Variable and macro for in-line access to current thread +/* Variable and static inline functions for in-line access to current thread and interpreter state */ #if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE) @@ -93,12 +93,6 @@ _PyThreadState_GET(void) #endif } -static inline PyThreadState* -_PyRuntimeState_GetThreadState(_PyRuntimeState *Py_UNUSED(runtime)) -{ - return _PyThreadState_GET(); -} - static inline void _Py_EnsureFuncTstateNotNULL(const char *func, PyThreadState *tstate) @@ -118,7 +112,7 @@ _Py_EnsureFuncTstateNotNULL(const char *func, PyThreadState *tstate) /* Get the current interpreter state. - The macro is unsafe: it does not check for error and it can return NULL. + The function is unsafe: it does not check for error and it can return NULL. The caller must hold the GIL. diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index d1b165d0ab9c38..6e06e874711bc2 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -32,8 +32,6 @@ struct _getargs_runtime_state { struct _PyArg_Parser *static_parsers; }; -/* ceval state */ - /* GIL state */ struct _gilstate_runtime_state { diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 42c4874d9466bf..7b9c73dd1edf3b 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -587,6 +587,7 @@ extern "C" { INIT_ID(__await__), \ INIT_ID(__bases__), \ INIT_ID(__bool__), \ + INIT_ID(__buffer__), \ INIT_ID(__build_class__), \ INIT_ID(__builtins__), \ INIT_ID(__bytes__), \ @@ -686,6 +687,7 @@ extern "C" { INIT_ID(__rdivmod__), \ INIT_ID(__reduce__), \ INIT_ID(__reduce_ex__), \ + INIT_ID(__release_buffer__), \ INIT_ID(__repr__), \ INIT_ID(__reversed__), \ INIT_ID(__rfloordiv__), \ @@ -839,6 +841,7 @@ extern "C" { INIT_ID(code), \ INIT_ID(command), \ INIT_ID(comment_factory), \ + INIT_ID(compile_mode), \ INIT_ID(consts), \ INIT_ID(context), \ INIT_ID(cookie), \ @@ -876,6 +879,7 @@ extern "C" { INIT_ID(dst_dir_fd), \ INIT_ID(duration), \ INIT_ID(e), \ + INIT_ID(eager_start), \ INIT_ID(effective_ids), \ INIT_ID(element_factory), \ INIT_ID(encode), \ @@ -966,6 +970,7 @@ extern "C" { INIT_ID(instructions), \ INIT_ID(intern), \ INIT_ID(intersection), \ + INIT_ID(is_running), \ INIT_ID(isatty), \ INIT_ID(isinstance), \ INIT_ID(isoformat), \ @@ -1021,6 +1026,7 @@ extern "C" { INIT_ID(memlimit), \ INIT_ID(message), \ INIT_ID(metaclass), \ + INIT_ID(metadata), \ INIT_ID(method), \ INIT_ID(mod), \ INIT_ID(mode), \ @@ -1113,6 +1119,7 @@ extern "C" { 
INIT_ID(reducer_override), \ INIT_ID(registry), \ INIT_ID(rel_tol), \ + INIT_ID(release), \ INIT_ID(reload), \ INIT_ID(repl), \ INIT_ID(replace), \ diff --git a/Include/internal/pycore_structseq.h b/Include/internal/pycore_structseq.h index d10a921c55ff8b..6f5dfc12707cf8 100644 --- a/Include/internal/pycore_structseq.h +++ b/Include/internal/pycore_structseq.h @@ -15,19 +15,23 @@ PyAPI_FUNC(PyTypeObject *) _PyStructSequence_NewType( PyStructSequence_Desc *desc, unsigned long tp_flags); -PyAPI_FUNC(int) _PyStructSequence_InitBuiltinWithFlags( +extern int _PyStructSequence_InitBuiltinWithFlags( + PyInterpreterState *interp, PyTypeObject *type, PyStructSequence_Desc *desc, unsigned long tp_flags); static inline int -_PyStructSequence_InitBuiltin(PyTypeObject *type, +_PyStructSequence_InitBuiltin(PyInterpreterState *interp, + PyTypeObject *type, PyStructSequence_Desc *desc) { - return _PyStructSequence_InitBuiltinWithFlags(type, desc, 0); + return _PyStructSequence_InitBuiltinWithFlags(interp, type, desc, 0); } -extern void _PyStructSequence_FiniType(PyTypeObject *type); +extern void _PyStructSequence_FiniBuiltin( + PyInterpreterState *interp, + PyTypeObject *type); #ifdef __cplusplus } diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h index 512c4c931f73e4..9a005be5402c4e 100644 --- a/Include/internal/pycore_symtable.h +++ b/Include/internal/pycore_symtable.h @@ -64,6 +64,7 @@ typedef struct _symtable_entry { unsigned ste_needs_class_closure : 1; /* for class scopes, true if a closure over __class__ should be created */ + unsigned ste_comp_inlined : 1; /* true if this comprehension is inlined */ unsigned ste_comp_iter_target : 1; /* true if visiting comprehension target */ int ste_comp_iter_expr; /* non-zero if visiting a comprehension range expression */ int ste_lineno; /* first line of block */ diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index 76253fd5fd864c..f42f8f62de2c1d 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -44,6 +44,13 @@ struct type_cache { typedef struct { PyTypeObject *type; + int readying; + int ready; + // XXX tp_dict, tp_bases, and tp_mro can probably be statically + // allocated, instead of dynamically and stored on the interpreter. + PyObject *tp_dict; + PyObject *tp_bases; + PyObject *tp_mro; PyObject *tp_subclasses; /* We never clean up weakrefs for static builtin types since they will effectively never get triggered. However, there @@ -104,10 +111,24 @@ _PyType_GetModuleState(PyTypeObject *type) } -extern int _PyStaticType_InitBuiltin(PyTypeObject *type); -extern static_builtin_state * _PyStaticType_GetState(PyTypeObject *); -extern void _PyStaticType_ClearWeakRefs(PyTypeObject *type); -extern void _PyStaticType_Dealloc(PyTypeObject *type); +extern int _PyStaticType_InitBuiltin(PyInterpreterState *, PyTypeObject *type); +extern static_builtin_state * _PyStaticType_GetState(PyInterpreterState *, PyTypeObject *); +extern void _PyStaticType_ClearWeakRefs(PyInterpreterState *, PyTypeObject *type); +extern void _PyStaticType_Dealloc(PyInterpreterState *, PyTypeObject *); + +PyAPI_FUNC(PyObject *) _PyType_GetDict(PyTypeObject *); +extern PyObject * _PyType_GetBases(PyTypeObject *type); +extern PyObject * _PyType_GetMRO(PyTypeObject *type); +extern PyObject* _PyType_GetSubclasses(PyTypeObject *); +extern int _PyType_HasSubclasses(PyTypeObject *); + +// PyType_Ready() must be called if _PyType_IsReady() is false. 
+// See also the Py_TPFLAGS_READY flag. +static inline int +_PyType_IsReady(PyTypeObject *type) +{ + return _PyType_GetDict(type) != NULL; +} PyObject * _Py_type_getattro_impl(PyTypeObject *type, PyObject *name, int *suppress_missing_attribute); @@ -117,6 +138,8 @@ _Py_type_getattro(PyTypeObject *type, PyObject *name); PyObject *_Py_slot_tp_getattro(PyObject *self, PyObject *name); PyObject *_Py_slot_tp_getattr_hook(PyObject *self, PyObject *name); +PyAPI_DATA(PyTypeObject) _PyBufferWrapper_Type; + PyObject * _PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *meth_found); PyObject * diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index 6d9cd24d9f3a13..8e086edbdf8193 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -96,6 +96,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(__bool__); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(__buffer__); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__build_class__); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); @@ -393,6 +396,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(__reduce_ex__); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(__release_buffer__); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__repr__); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); @@ -852,6 +858,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(comment_factory); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(compile_mode); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(consts); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); @@ -963,6 +972,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(e); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(eager_start); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(effective_ids); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); @@ -1233,6 +1245,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(intersection); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(is_running); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(isatty); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); @@ -1398,6 +1413,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(metaclass); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(metadata); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(method); assert(_PyUnicode_CheckConsistency(string, 1)); 
_PyUnicode_InternInPlace(interp, &string); @@ -1674,6 +1692,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(rel_tol); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(release); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(reload); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); diff --git a/Include/moduleobject.h b/Include/moduleobject.h index 555564ec73b4a2..b8bdfe29d80406 100644 --- a/Include/moduleobject.h +++ b/Include/moduleobject.h @@ -78,11 +78,17 @@ struct PyModuleDef_Slot { #define Py_mod_create 1 #define Py_mod_exec 2 +#define Py_mod_multiple_interpreters 3 #ifndef Py_LIMITED_API -#define _Py_mod_LAST_SLOT 2 +#define _Py_mod_LAST_SLOT 3 #endif +/* for Py_mod_multiple_interpreters: */ +#define Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED ((void *)0) +#define Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED ((void *)1) +#define Py_MOD_PER_INTERPRETER_GIL_SUPPORTED ((void *)2) + #endif /* New in 3.5 */ struct PyModuleDef { diff --git a/Include/object.h b/Include/object.h index 66c3df0d7f780a..81aeb2d8bd5a69 100644 --- a/Include/object.h +++ b/Include/object.h @@ -355,6 +355,8 @@ PyAPI_FUNC(PyObject *) PyType_GetQualName(PyTypeObject *); #endif #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030C0000 PyAPI_FUNC(PyObject *) PyType_FromMetaclass(PyTypeObject*, PyObject*, PyType_Spec*, PyObject*); +PyAPI_FUNC(void *) PyObject_GetTypeData(PyObject *obj, PyTypeObject *cls); +PyAPI_FUNC(Py_ssize_t) PyType_GetTypeDataSize(PyTypeObject *cls); #endif /* Generic type check */ @@ -521,6 +523,9 @@ given type object has a specified feature. // subject itself (rather than a mapped attribute on it): #define _Py_TPFLAGS_MATCH_SELF (1UL << 22) +/* Items (ob_size*tp_itemsize) are found at the end of an instance's memory */ +#define Py_TPFLAGS_ITEMS_AT_END (1UL << 23) + /* These flags are used to determine if a type is a subclass. 
*/ #define Py_TPFLAGS_LONG_SUBCLASS (1UL << 24) #define Py_TPFLAGS_LIST_SUBCLASS (1UL << 25) diff --git a/Include/opcode.h b/Include/opcode.h index a8b96c3d38b131..d91d2f7a5fff02 100644 --- a/Include/opcode.h +++ b/Include/opcode.h @@ -97,6 +97,7 @@ extern "C" { #define JUMP_BACKWARD 140 #define LOAD_SUPER_ATTR 141 #define CALL_FUNCTION_EX 142 +#define LOAD_FAST_AND_CLEAR 143 #define EXTENDED_ARG 144 #define LIST_APPEND 145 #define SET_ADD 146 @@ -147,7 +148,8 @@ extern "C" { #define LOAD_SUPER_METHOD 263 #define LOAD_ZERO_SUPER_METHOD 264 #define LOAD_ZERO_SUPER_ATTR 265 -#define MAX_PSEUDO_OPCODE 265 +#define STORE_FAST_MAYBE_NULL 266 +#define MAX_PSEUDO_OPCODE 266 #define BINARY_OP_ADD_FLOAT 6 #define BINARY_OP_ADD_INT 7 #define BINARY_OP_ADD_UNICODE 8 @@ -203,14 +205,14 @@ extern "C" { #define STORE_ATTR_INSTANCE_VALUE 111 #define STORE_ATTR_SLOT 112 #define STORE_ATTR_WITH_HINT 113 -#define STORE_FAST__LOAD_FAST 143 -#define STORE_FAST__STORE_FAST 153 -#define STORE_SUBSCR_DICT 154 -#define STORE_SUBSCR_LIST_INT 158 -#define UNPACK_SEQUENCE_LIST 159 -#define UNPACK_SEQUENCE_TUPLE 160 -#define UNPACK_SEQUENCE_TWO_TUPLE 161 -#define SEND_GEN 166 +#define STORE_FAST__LOAD_FAST 153 +#define STORE_FAST__STORE_FAST 154 +#define STORE_SUBSCR_DICT 158 +#define STORE_SUBSCR_LIST_INT 159 +#define UNPACK_SEQUENCE_LIST 160 +#define UNPACK_SEQUENCE_TUPLE 161 +#define UNPACK_SEQUENCE_TWO_TUPLE 166 +#define SEND_GEN 167 #define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\ || ((op) == JUMP) \ @@ -219,6 +221,7 @@ extern "C" { || ((op) == LOAD_SUPER_METHOD) \ || ((op) == LOAD_ZERO_SUPER_METHOD) \ || ((op) == LOAD_ZERO_SUPER_ATTR) \ + || ((op) == STORE_FAST_MAYBE_NULL) \ ) #define HAS_CONST(op) (false\ diff --git a/Include/pybuffer.h b/Include/pybuffer.h index bbac60972f5127..ca1c6058d9052c 100644 --- a/Include/pybuffer.h +++ b/Include/pybuffer.h @@ -104,7 +104,7 @@ PyAPI_FUNC(void) PyBuffer_Release(Py_buffer *view); /* Maximum number of dimensions */ #define PyBUF_MAX_NDIM 64 -/* Flags for getting buffers */ +/* Flags for getting buffers. Keep these in sync with inspect.BufferFlags. */ #define PyBUF_SIMPLE 0 #define PyBUF_WRITABLE 0x0001 diff --git a/Include/pyport.h b/Include/pyport.h index bd0ba6d0681b21..d7c6ae64f2bf2f 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -765,4 +765,15 @@ extern char * _getpty(int *, int, mode_t, int); #undef __bool__ #endif +// Make sure we have maximum alignment, even if the current compiler +// does not support max_align_t. Note that: +// - Autoconf reports alignment of unknown types to 0. +// - 'long double' has maximum alignment on *most* platforms, +// looks like the best we can do for pre-C11 compilers. +// - The value is tested, see test_alignof_max_align_t +#if !defined(ALIGNOF_MAX_ALIGN_T) || ALIGNOF_MAX_ALIGN_T == 0 +# undef ALIGNOF_MAX_ALIGN_T +# define ALIGNOF_MAX_ALIGN_T _Alignof(long double) +#endif + #endif /* Py_PYPORT_H */ diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h index 74474f5bb8f976..5839c747a29275 100644 --- a/Include/unicodeobject.h +++ b/Include/unicodeobject.h @@ -626,7 +626,7 @@ PyAPI_FUNC(PyObject*) PyUnicode_AsLatin1String( /* --- ASCII Codecs ------------------------------------------------------- - Only 7-bit ASCII data is excepted. All other codes generate errors. + Only 7-bit ASCII data is expected. All other codes generate errors. 
*/ diff --git a/Lib/_collections_abc.py b/Lib/_collections_abc.py index 9d7724c33474cc..601107d2d86771 100644 --- a/Lib/_collections_abc.py +++ b/Lib/_collections_abc.py @@ -49,7 +49,7 @@ def _f(): pass "Mapping", "MutableMapping", "MappingView", "KeysView", "ItemsView", "ValuesView", "Sequence", "MutableSequence", - "ByteString", + "ByteString", "Buffer", ] # This module has been renamed from collections.abc to _collections_abc to @@ -439,6 +439,21 @@ def __subclasshook__(cls, C): return NotImplemented +class Buffer(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __buffer__(self, flags: int, /) -> memoryview: + raise NotImplementedError + + @classmethod + def __subclasshook__(cls, C): + if cls is Buffer: + return _check_methods(C, "__buffer__") + return NotImplemented + + class _CallableGenericAlias(GenericAlias): """ Represent `Callable[argtypes, resulttype]`. @@ -1056,8 +1071,27 @@ def count(self, value): Sequence.register(range) Sequence.register(memoryview) +class _DeprecateByteStringMeta(ABCMeta): + def __new__(cls, name, bases, namespace, **kwargs): + if name != "ByteString": + import warnings + + warnings._deprecated( + "collections.abc.ByteString", + remove=(3, 14), + ) + return super().__new__(cls, name, bases, namespace, **kwargs) + + def __instancecheck__(cls, instance): + import warnings + + warnings._deprecated( + "collections.abc.ByteString", + remove=(3, 14), + ) + return super().__instancecheck__(instance) -class ByteString(Sequence): +class ByteString(Sequence, metaclass=_DeprecateByteStringMeta): """This unifies bytes and bytearray. XXX Should add all their methods. diff --git a/Lib/_pydatetime.py b/Lib/_pydatetime.py new file mode 100644 index 00000000000000..f4fc2c58e5e293 --- /dev/null +++ b/Lib/_pydatetime.py @@ -0,0 +1,2647 @@ +"""Concrete date/time and related types. + +See http://www.iana.org/time-zones/repository/tz-link.html for +time zone and DST data sources. +""" + +__all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", + "MINYEAR", "MAXYEAR", "UTC") + + +import time as _time +import math as _math +import sys +from operator import index as _index + +def _cmp(x, y): + return 0 if x == y else 1 if x > y else -1 + +def _get_class_module(self): + module_name = self.__class__.__module__ + if module_name == '_pydatetime': + return 'datetime' + else: + return module_name + +MINYEAR = 1 +MAXYEAR = 9999 +_MAXORDINAL = 3652059 # date.max.toordinal() + +# Utility functions, adapted from Python's Demo/classes/Dates.py, which +# also assumes the current Gregorian calendar indefinitely extended in +# both directions. Difference: Dates.py calls January 1 of year 0 day +# number 1. The code here calls January 1 of year 1 day number 1. This is +# to match the definition of the "proleptic Gregorian" calendar in Dershowitz +# and Reingold's "Calendrical Calculations", where it's the base calendar +# for all computations. See the book for algorithms for converting between +# proleptic Gregorian ordinals and many other calendar systems. + +# -1 is a placeholder for indexing purposes. +_DAYS_IN_MONTH = [-1, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + +_DAYS_BEFORE_MONTH = [-1] # -1 is a placeholder for indexing purposes. +dbm = 0 +for dim in _DAYS_IN_MONTH[1:]: + _DAYS_BEFORE_MONTH.append(dbm) + dbm += dim +del dbm, dim + +def _is_leap(year): + "year -> 1 if leap year, else 0." + return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) + +def _days_before_year(year): + "year -> number of days before January 1st of year." 
+ y = year - 1 + return y*365 + y//4 - y//100 + y//400 + +def _days_in_month(year, month): + "year, month -> number of days in that month in that year." + assert 1 <= month <= 12, month + if month == 2 and _is_leap(year): + return 29 + return _DAYS_IN_MONTH[month] + +def _days_before_month(year, month): + "year, month -> number of days in year preceding first day of month." + assert 1 <= month <= 12, 'month must be in 1..12' + return _DAYS_BEFORE_MONTH[month] + (month > 2 and _is_leap(year)) + +def _ymd2ord(year, month, day): + "year, month, day -> ordinal, considering 01-Jan-0001 as day 1." + assert 1 <= month <= 12, 'month must be in 1..12' + dim = _days_in_month(year, month) + assert 1 <= day <= dim, ('day must be in 1..%d' % dim) + return (_days_before_year(year) + + _days_before_month(year, month) + + day) + +_DI400Y = _days_before_year(401) # number of days in 400 years +_DI100Y = _days_before_year(101) # " " " " 100 " +_DI4Y = _days_before_year(5) # " " " " 4 " + +# A 4-year cycle has an extra leap day over what we'd get from pasting +# together 4 single years. +assert _DI4Y == 4 * 365 + 1 + +# Similarly, a 400-year cycle has an extra leap day over what we'd get from +# pasting together 4 100-year cycles. +assert _DI400Y == 4 * _DI100Y + 1 + +# OTOH, a 100-year cycle has one fewer leap day than we'd get from +# pasting together 25 4-year cycles. +assert _DI100Y == 25 * _DI4Y - 1 + +def _ord2ymd(n): + "ordinal -> (year, month, day), considering 01-Jan-0001 as day 1." + + # n is a 1-based index, starting at 1-Jan-1. The pattern of leap years + # repeats exactly every 400 years. The basic strategy is to find the + # closest 400-year boundary at or before n, then work with the offset + # from that boundary to n. Life is much clearer if we subtract 1 from + # n first -- then the values of n at 400-year boundaries are exactly + # those divisible by _DI400Y: + # + # D M Y n n-1 + # -- --- ---- ---------- ---------------- + # 31 Dec -400 -_DI400Y -_DI400Y -1 + # 1 Jan -399 -_DI400Y +1 -_DI400Y 400-year boundary + # ... + # 30 Dec 000 -1 -2 + # 31 Dec 000 0 -1 + # 1 Jan 001 1 0 400-year boundary + # 2 Jan 001 2 1 + # 3 Jan 001 3 2 + # ... + # 31 Dec 400 _DI400Y _DI400Y -1 + # 1 Jan 401 _DI400Y +1 _DI400Y 400-year boundary + n -= 1 + n400, n = divmod(n, _DI400Y) + year = n400 * 400 + 1 # ..., -399, 1, 401, ... + + # Now n is the (non-negative) offset, in days, from January 1 of year, to + # the desired date. Now compute how many 100-year cycles precede n. + # Note that it's possible for n100 to equal 4! In that case 4 full + # 100-year cycles precede the desired day, which implies the desired + # day is December 31 at the end of a 400-year cycle. + n100, n = divmod(n, _DI100Y) + + # Now compute how many 4-year cycles precede it. + n4, n = divmod(n, _DI4Y) + + # And now how many single years. Again n1 can be 4, and again meaning + # that the desired day is December 31 at the end of the 4-year cycle. + n1, n = divmod(n, 365) + + year += n100 * 100 + n4 * 4 + n1 + if n1 == 4 or n100 == 4: + assert n == 0 + return year-1, 12, 31 + + # Now the year is correct, and n is the offset from January 1. We find + # the month via an estimate that's either exact or one too large. 
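[Editor's aside, not part of the patch] The comment above describes the month-estimate trick used next in _ord2ymd: for a 0-based day offset n within the year, (n + 50) >> 5 is either the correct month or one too large, and comparing against the days-before-month table corrects the overshoot. A minimal self-contained sketch (values assume a non-leap year; _DAYS_BEFORE is a local copy of the cumulative table for illustration, not the module's own name):

    # Check the estimate for Jan 1, Feb 28, Mar 1 and Dec 31 (0-based offsets).
    _DAYS_BEFORE = [-1, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
    for n, expected in [(0, 1), (58, 2), (59, 3), (364, 12)]:
        month = (n + 50) >> 5           # exact or one too large
        if _DAYS_BEFORE[month] > n:     # estimate overshot: step back one month
            month -= 1
        assert month == expected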
+ leapyear = n1 == 3 and (n4 != 24 or n100 == 3) + assert leapyear == _is_leap(year) + month = (n + 50) >> 5 + preceding = _DAYS_BEFORE_MONTH[month] + (month > 2 and leapyear) + if preceding > n: # estimate is too large + month -= 1 + preceding -= _DAYS_IN_MONTH[month] + (month == 2 and leapyear) + n -= preceding + assert 0 <= n < _days_in_month(year, month) + + # Now the year and month are correct, and n is the offset from the + # start of that month: we're done! + return year, month, n+1 + +# Month and day names. For localized versions, see the calendar module. +_MONTHNAMES = [None, "Jan", "Feb", "Mar", "Apr", "May", "Jun", + "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] +_DAYNAMES = [None, "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] + + +def _build_struct_time(y, m, d, hh, mm, ss, dstflag): + wday = (_ymd2ord(y, m, d) + 6) % 7 + dnum = _days_before_month(y, m) + d + return _time.struct_time((y, m, d, hh, mm, ss, wday, dnum, dstflag)) + +def _format_time(hh, mm, ss, us, timespec='auto'): + specs = { + 'hours': '{:02d}', + 'minutes': '{:02d}:{:02d}', + 'seconds': '{:02d}:{:02d}:{:02d}', + 'milliseconds': '{:02d}:{:02d}:{:02d}.{:03d}', + 'microseconds': '{:02d}:{:02d}:{:02d}.{:06d}' + } + + if timespec == 'auto': + # Skip trailing microseconds when us==0. + timespec = 'microseconds' if us else 'seconds' + elif timespec == 'milliseconds': + us //= 1000 + try: + fmt = specs[timespec] + except KeyError: + raise ValueError('Unknown timespec value') + else: + return fmt.format(hh, mm, ss, us) + +def _format_offset(off, sep=':'): + s = '' + if off is not None: + if off.days < 0: + sign = "-" + off = -off + else: + sign = "+" + hh, mm = divmod(off, timedelta(hours=1)) + mm, ss = divmod(mm, timedelta(minutes=1)) + s += "%s%02d%s%02d" % (sign, hh, sep, mm) + if ss or ss.microseconds: + s += "%s%02d" % (sep, ss.seconds) + + if ss.microseconds: + s += '.%06d' % ss.microseconds + return s + +# Correctly substitute for %z and %Z escapes in strftime formats. +def _wrap_strftime(object, format, timetuple): + # Don't call utcoffset() or tzname() unless actually needed. + freplace = None # the string to use for %f + zreplace = None # the string to use for %z + colonzreplace = None # the string to use for %:z + Zreplace = None # the string to use for %Z + + # Scan format for %z, %:z and %Z escapes, replacing as needed. 
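[Editor's aside, not part of the patch] Seen from the caller's side, the substitutions performed below mean an aware datetime can format its own offset without relying on the platform strftime for these escapes. A hedged sketch; the %:z form is the colon-separated variant handled by the colonzreplace branch and is assumed to be available wherever this implementation ships, and the %Z output shown is simply what a fixed-offset timezone reports as its name:

    from datetime import datetime, timezone, timedelta

    dt = datetime(2023, 5, 1, 12, 0, tzinfo=timezone(timedelta(hours=5, minutes=30)))
    print(dt.strftime("%z"))    # +0530
    print(dt.strftime("%:z"))   # +05:30  (assumed supported by this implementation)
    print(dt.strftime("%Z"))    # UTC+05:30 for a fixed-offset timezone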
+ newformat = [] + push = newformat.append + i, n = 0, len(format) + while i < n: + ch = format[i] + i += 1 + if ch == '%': + if i < n: + ch = format[i] + i += 1 + if ch == 'f': + if freplace is None: + freplace = '%06d' % getattr(object, + 'microsecond', 0) + newformat.append(freplace) + elif ch == 'z': + if zreplace is None: + if hasattr(object, "utcoffset"): + zreplace = _format_offset(object.utcoffset(), sep="") + else: + zreplace = "" + assert '%' not in zreplace + newformat.append(zreplace) + elif ch == ':': + if i < n: + ch2 = format[i] + i += 1 + if ch2 == 'z': + if colonzreplace is None: + if hasattr(object, "utcoffset"): + colonzreplace = _format_offset(object.utcoffset(), sep=":") + else: + colonzreplace = "" + assert '%' not in colonzreplace + newformat.append(colonzreplace) + else: + push('%') + push(ch) + push(ch2) + elif ch == 'Z': + if Zreplace is None: + Zreplace = "" + if hasattr(object, "tzname"): + s = object.tzname() + if s is not None: + # strftime is going to have at this: escape % + Zreplace = s.replace('%', '%%') + newformat.append(Zreplace) + else: + push('%') + push(ch) + else: + push('%') + else: + push(ch) + newformat = "".join(newformat) + return _time.strftime(newformat, timetuple) + +# Helpers for parsing the result of isoformat() +def _is_ascii_digit(c): + return c in "0123456789" + +def _find_isoformat_datetime_separator(dtstr): + # See the comment in _datetimemodule.c:_find_isoformat_datetime_separator + len_dtstr = len(dtstr) + if len_dtstr == 7: + return 7 + + assert len_dtstr > 7 + date_separator = "-" + week_indicator = "W" + + if dtstr[4] == date_separator: + if dtstr[5] == week_indicator: + if len_dtstr < 8: + raise ValueError("Invalid ISO string") + if len_dtstr > 8 and dtstr[8] == date_separator: + if len_dtstr == 9: + raise ValueError("Invalid ISO string") + if len_dtstr > 10 and _is_ascii_digit(dtstr[10]): + # This is as far as we need to resolve the ambiguity for + # the moment - if we have YYYY-Www-##, the separator is + # either a hyphen at 8 or a number at 10. + # + # We'll assume it's a hyphen at 8 because it's way more + # likely that someone will use a hyphen as a separator than + # a number, but at this point it's really best effort + # because this is an extension of the spec anyway. + # TODO(pganssle): Document this + return 8 + return 10 + else: + # YYYY-Www (8) + return 8 + else: + # YYYY-MM-DD (10) + return 10 + else: + if dtstr[4] == week_indicator: + # YYYYWww (7) or YYYYWwwd (8) + idx = 7 + while idx < len_dtstr: + if not _is_ascii_digit(dtstr[idx]): + break + idx += 1 + + if idx < 9: + return idx + + if idx % 2 == 0: + # If the index of the last number is even, it's YYYYWwwd + return 7 + else: + return 8 + else: + # YYYYMMDD (8) + return 8 + + +def _parse_isoformat_date(dtstr): + # It is assumed that this is an ASCII-only string of lengths 7, 8 or 10, + # see the comment on Modules/_datetimemodule.c:_find_isoformat_datetime_separator + assert len(dtstr) in (7, 8, 10) + year = int(dtstr[0:4]) + has_sep = dtstr[4] == '-' + + pos = 4 + has_sep + if dtstr[pos:pos + 1] == "W": + # YYYY-?Www-?D? 
+ pos += 1 + weekno = int(dtstr[pos:pos + 2]) + pos += 2 + + dayno = 1 + if len(dtstr) > pos: + if (dtstr[pos:pos + 1] == '-') != has_sep: + raise ValueError("Inconsistent use of dash separator") + + pos += has_sep + + dayno = int(dtstr[pos:pos + 1]) + + return list(_isoweek_to_gregorian(year, weekno, dayno)) + else: + month = int(dtstr[pos:pos + 2]) + pos += 2 + if (dtstr[pos:pos + 1] == "-") != has_sep: + raise ValueError("Inconsistent use of dash separator") + + pos += has_sep + day = int(dtstr[pos:pos + 2]) + + return [year, month, day] + + +_FRACTION_CORRECTION = [100000, 10000, 1000, 100, 10] + + +def _parse_hh_mm_ss_ff(tstr): + # Parses things of the form HH[:?MM[:?SS[{.,}fff[fff]]]] + len_str = len(tstr) + + time_comps = [0, 0, 0, 0] + pos = 0 + for comp in range(0, 3): + if (len_str - pos) < 2: + raise ValueError("Incomplete time component") + + time_comps[comp] = int(tstr[pos:pos+2]) + + pos += 2 + next_char = tstr[pos:pos+1] + + if comp == 0: + has_sep = next_char == ':' + + if not next_char or comp >= 2: + break + + if has_sep and next_char != ':': + raise ValueError("Invalid time separator: %c" % next_char) + + pos += has_sep + + if pos < len_str: + if tstr[pos] not in '.,': + raise ValueError("Invalid microsecond component") + else: + pos += 1 + + len_remainder = len_str - pos + + if len_remainder >= 6: + to_parse = 6 + else: + to_parse = len_remainder + + time_comps[3] = int(tstr[pos:(pos+to_parse)]) + if to_parse < 6: + time_comps[3] *= _FRACTION_CORRECTION[to_parse-1] + if (len_remainder > to_parse + and not all(map(_is_ascii_digit, tstr[(pos+to_parse):]))): + raise ValueError("Non-digit values in unparsed fraction") + + return time_comps + +def _parse_isoformat_time(tstr): + # Format supported is HH[:MM[:SS[.fff[fff]]]][+HH:MM[:SS[.ffffff]]] + len_str = len(tstr) + if len_str < 2: + raise ValueError("Isoformat time too short") + + # This is equivalent to re.search('[+-Z]', tstr), but faster + tz_pos = (tstr.find('-') + 1 or tstr.find('+') + 1 or tstr.find('Z') + 1) + timestr = tstr[:tz_pos-1] if tz_pos > 0 else tstr + + time_comps = _parse_hh_mm_ss_ff(timestr) + + tzi = None + if tz_pos == len_str and tstr[-1] == 'Z': + tzi = timezone.utc + elif tz_pos > 0: + tzstr = tstr[tz_pos:] + + # Valid time zone strings are: + # HH len: 2 + # HHMM len: 4 + # HH:MM len: 5 + # HHMMSS len: 6 + # HHMMSS.f+ len: 7+ + # HH:MM:SS len: 8 + # HH:MM:SS.f+ len: 10+ + + if len(tzstr) in (0, 1, 3): + raise ValueError("Malformed time zone string") + + tz_comps = _parse_hh_mm_ss_ff(tzstr) + + if all(x == 0 for x in tz_comps): + tzi = timezone.utc + else: + tzsign = -1 if tstr[tz_pos - 1] == '-' else 1 + + td = timedelta(hours=tz_comps[0], minutes=tz_comps[1], + seconds=tz_comps[2], microseconds=tz_comps[3]) + + tzi = timezone(tzsign * td) + + time_comps.append(tzi) + + return time_comps + +# tuple[int, int, int] -> tuple[int, int, int] version of date.fromisocalendar +def _isoweek_to_gregorian(year, week, day): + # Year is bounded this way because 9999-12-31 is (9999, 52, 5) + if not MINYEAR <= year <= MAXYEAR: + raise ValueError(f"Year is out of range: {year}") + + if not 0 < week < 53: + out_of_range = True + + if week == 53: + # ISO years have 53 weeks in them on years starting with a + # Thursday and leap years starting on a Wednesday + first_weekday = _ymd2ord(year, 1, 1) % 7 + if (first_weekday == 4 or (first_weekday == 3 and + _is_leap(year))): + out_of_range = False + + if out_of_range: + raise ValueError(f"Invalid week: {week}") + + if not 0 < day < 8: + raise ValueError(f"Invalid 
weekday: {day} (range is [1, 7])") + + # Now compute the offset from (Y, 1, 1) in days: + day_offset = (week - 1) * 7 + (day - 1) + + # Calculate the ordinal day for monday, week 1 + day_1 = _isoweek1monday(year) + ord_day = day_1 + day_offset + + return _ord2ymd(ord_day) + + +# Just raise TypeError if the arg isn't None or a string. +def _check_tzname(name): + if name is not None and not isinstance(name, str): + raise TypeError("tzinfo.tzname() must return None or string, " + "not '%s'" % type(name)) + +# name is the offset-producing method, "utcoffset" or "dst". +# offset is what it returned. +# If offset isn't None or timedelta, raises TypeError. +# If offset is None, returns None. +# Else offset is checked for being in range. +# If it is, its integer value is returned. Else ValueError is raised. +def _check_utc_offset(name, offset): + assert name in ("utcoffset", "dst") + if offset is None: + return + if not isinstance(offset, timedelta): + raise TypeError("tzinfo.%s() must return None " + "or timedelta, not '%s'" % (name, type(offset))) + if not -timedelta(1) < offset < timedelta(1): + raise ValueError("%s()=%s, must be strictly between " + "-timedelta(hours=24) and timedelta(hours=24)" % + (name, offset)) + +def _check_date_fields(year, month, day): + year = _index(year) + month = _index(month) + day = _index(day) + if not MINYEAR <= year <= MAXYEAR: + raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year) + if not 1 <= month <= 12: + raise ValueError('month must be in 1..12', month) + dim = _days_in_month(year, month) + if not 1 <= day <= dim: + raise ValueError('day must be in 1..%d' % dim, day) + return year, month, day + +def _check_time_fields(hour, minute, second, microsecond, fold): + hour = _index(hour) + minute = _index(minute) + second = _index(second) + microsecond = _index(microsecond) + if not 0 <= hour <= 23: + raise ValueError('hour must be in 0..23', hour) + if not 0 <= minute <= 59: + raise ValueError('minute must be in 0..59', minute) + if not 0 <= second <= 59: + raise ValueError('second must be in 0..59', second) + if not 0 <= microsecond <= 999999: + raise ValueError('microsecond must be in 0..999999', microsecond) + if fold not in (0, 1): + raise ValueError('fold must be either 0 or 1', fold) + return hour, minute, second, microsecond, fold + +def _check_tzinfo_arg(tz): + if tz is not None and not isinstance(tz, tzinfo): + raise TypeError("tzinfo argument must be None or of a tzinfo subclass") + +def _cmperror(x, y): + raise TypeError("can't compare '%s' to '%s'" % ( + type(x).__name__, type(y).__name__)) + +def _divide_and_round(a, b): + """divide a by b and round result to the nearest integer + + When the ratio is exactly half-way between two integers, + the even integer is returned. + """ + # Based on the reference implementation for divmod_near + # in Objects/longobject.c. + q, r = divmod(a, b) + # round up if either r / b > 0.5, or r / b == 0.5 and q is odd. + # The expression r / b > 0.5 is equivalent to 2 * r > b if b is + # positive, 2 * r < b if b negative. + r *= 2 + greater_than_half = r > b if b > 0 else r < b + if greater_than_half or r == b and q % 2 == 1: + q += 1 + + return q + + +class timedelta: + """Represent the difference between two datetime objects. 
+ + Supported operators: + + - add, subtract timedelta + - unary plus, minus, abs + - compare to timedelta + - multiply, divide by int + + In addition, datetime supports subtraction of two datetime objects + returning a timedelta, and addition or subtraction of a datetime + and a timedelta giving a datetime. + + Representation: (days, seconds, microseconds). + """ + # The representation of (days, seconds, microseconds) was chosen + # arbitrarily; the exact rationale originally specified in the docstring + # was "Because I felt like it." + + __slots__ = '_days', '_seconds', '_microseconds', '_hashcode' + + def __new__(cls, days=0, seconds=0, microseconds=0, + milliseconds=0, minutes=0, hours=0, weeks=0): + # Doing this efficiently and accurately in C is going to be difficult + # and error-prone, due to ubiquitous overflow possibilities, and that + # C double doesn't have enough bits of precision to represent + # microseconds over 10K years faithfully. The code here tries to make + # explicit where go-fast assumptions can be relied on, in order to + # guide the C implementation; it's way more convoluted than speed- + # ignoring auto-overflow-to-long idiomatic Python could be. + + # XXX Check that all inputs are ints or floats. + + # Final values, all integer. + # s and us fit in 32-bit signed ints; d isn't bounded. + d = s = us = 0 + + # Normalize everything to days, seconds, microseconds. + days += weeks*7 + seconds += minutes*60 + hours*3600 + microseconds += milliseconds*1000 + + # Get rid of all fractions, and normalize s and us. + # Take a deep breath . + if isinstance(days, float): + dayfrac, days = _math.modf(days) + daysecondsfrac, daysecondswhole = _math.modf(dayfrac * (24.*3600.)) + assert daysecondswhole == int(daysecondswhole) # can't overflow + s = int(daysecondswhole) + assert days == int(days) + d = int(days) + else: + daysecondsfrac = 0.0 + d = days + assert isinstance(daysecondsfrac, float) + assert abs(daysecondsfrac) <= 1.0 + assert isinstance(d, int) + assert abs(s) <= 24 * 3600 + # days isn't referenced again before redefinition + + if isinstance(seconds, float): + secondsfrac, seconds = _math.modf(seconds) + assert seconds == int(seconds) + seconds = int(seconds) + secondsfrac += daysecondsfrac + assert abs(secondsfrac) <= 2.0 + else: + secondsfrac = daysecondsfrac + # daysecondsfrac isn't referenced again + assert isinstance(secondsfrac, float) + assert abs(secondsfrac) <= 2.0 + + assert isinstance(seconds, int) + days, seconds = divmod(seconds, 24*3600) + d += days + s += int(seconds) # can't overflow + assert isinstance(s, int) + assert abs(s) <= 2 * 24 * 3600 + # seconds isn't referenced again before redefinition + + usdouble = secondsfrac * 1e6 + assert abs(usdouble) < 2.1e6 # exact value not critical + # secondsfrac isn't referenced again + + if isinstance(microseconds, float): + microseconds = round(microseconds + usdouble) + seconds, microseconds = divmod(microseconds, 1000000) + days, seconds = divmod(seconds, 24*3600) + d += days + s += seconds + else: + microseconds = int(microseconds) + seconds, microseconds = divmod(microseconds, 1000000) + days, seconds = divmod(seconds, 24*3600) + d += days + s += seconds + microseconds = round(microseconds + usdouble) + assert isinstance(s, int) + assert isinstance(microseconds, int) + assert abs(s) <= 3 * 24 * 3600 + assert abs(microseconds) < 3.1e6 + + # Just a little bit of carrying possible for microseconds and seconds. 
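[Editor's aside, not part of the patch] The normalization above is what guarantees the canonical (days, seconds, microseconds) representation promised in the class docstring: seconds ends up in [0, 24*3600) and microseconds in [0, 10**6), with any sign carried into days. A small sketch of the resulting invariant, using the stdlib class for illustration:

    from datetime import timedelta

    d = timedelta(hours=25, milliseconds=1500)          # mixed units in
    print(d.days, d.seconds, d.microseconds)            # 1 3601 500000
    assert d == timedelta(days=1, seconds=3601, microseconds=500000)
    assert timedelta(seconds=-1) == timedelta(days=-1, seconds=86399)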
+ seconds, us = divmod(microseconds, 1000000) + s += seconds + days, s = divmod(s, 24*3600) + d += days + + assert isinstance(d, int) + assert isinstance(s, int) and 0 <= s < 24*3600 + assert isinstance(us, int) and 0 <= us < 1000000 + + if abs(d) > 999999999: + raise OverflowError("timedelta # of days is too large: %d" % d) + + self = object.__new__(cls) + self._days = d + self._seconds = s + self._microseconds = us + self._hashcode = -1 + return self + + def __repr__(self): + args = [] + if self._days: + args.append("days=%d" % self._days) + if self._seconds: + args.append("seconds=%d" % self._seconds) + if self._microseconds: + args.append("microseconds=%d" % self._microseconds) + if not args: + args.append('0') + return "%s.%s(%s)" % (_get_class_module(self), + self.__class__.__qualname__, + ', '.join(args)) + + def __str__(self): + mm, ss = divmod(self._seconds, 60) + hh, mm = divmod(mm, 60) + s = "%d:%02d:%02d" % (hh, mm, ss) + if self._days: + def plural(n): + return n, abs(n) != 1 and "s" or "" + s = ("%d day%s, " % plural(self._days)) + s + if self._microseconds: + s = s + ".%06d" % self._microseconds + return s + + def total_seconds(self): + """Total seconds in the duration.""" + return ((self.days * 86400 + self.seconds) * 10**6 + + self.microseconds) / 10**6 + + # Read-only field accessors + @property + def days(self): + """days""" + return self._days + + @property + def seconds(self): + """seconds""" + return self._seconds + + @property + def microseconds(self): + """microseconds""" + return self._microseconds + + def __add__(self, other): + if isinstance(other, timedelta): + # for CPython compatibility, we cannot use + # our __class__ here, but need a real timedelta + return timedelta(self._days + other._days, + self._seconds + other._seconds, + self._microseconds + other._microseconds) + return NotImplemented + + __radd__ = __add__ + + def __sub__(self, other): + if isinstance(other, timedelta): + # for CPython compatibility, we cannot use + # our __class__ here, but need a real timedelta + return timedelta(self._days - other._days, + self._seconds - other._seconds, + self._microseconds - other._microseconds) + return NotImplemented + + def __rsub__(self, other): + if isinstance(other, timedelta): + return -self + other + return NotImplemented + + def __neg__(self): + # for CPython compatibility, we cannot use + # our __class__ here, but need a real timedelta + return timedelta(-self._days, + -self._seconds, + -self._microseconds) + + def __pos__(self): + return self + + def __abs__(self): + if self._days < 0: + return -self + else: + return self + + def __mul__(self, other): + if isinstance(other, int): + # for CPython compatibility, we cannot use + # our __class__ here, but need a real timedelta + return timedelta(self._days * other, + self._seconds * other, + self._microseconds * other) + if isinstance(other, float): + usec = self._to_microseconds() + a, b = other.as_integer_ratio() + return timedelta(0, 0, _divide_and_round(usec * a, b)) + return NotImplemented + + __rmul__ = __mul__ + + def _to_microseconds(self): + return ((self._days * (24*3600) + self._seconds) * 1000000 + + self._microseconds) + + def __floordiv__(self, other): + if not isinstance(other, (int, timedelta)): + return NotImplemented + usec = self._to_microseconds() + if isinstance(other, timedelta): + return usec // other._to_microseconds() + if isinstance(other, int): + return timedelta(0, 0, usec // other) + + def __truediv__(self, other): + if not isinstance(other, (int, float, timedelta)): + return 
NotImplemented + usec = self._to_microseconds() + if isinstance(other, timedelta): + return usec / other._to_microseconds() + if isinstance(other, int): + return timedelta(0, 0, _divide_and_round(usec, other)) + if isinstance(other, float): + a, b = other.as_integer_ratio() + return timedelta(0, 0, _divide_and_round(b * usec, a)) + + def __mod__(self, other): + if isinstance(other, timedelta): + r = self._to_microseconds() % other._to_microseconds() + return timedelta(0, 0, r) + return NotImplemented + + def __divmod__(self, other): + if isinstance(other, timedelta): + q, r = divmod(self._to_microseconds(), + other._to_microseconds()) + return q, timedelta(0, 0, r) + return NotImplemented + + # Comparisons of timedelta objects with other. + + def __eq__(self, other): + if isinstance(other, timedelta): + return self._cmp(other) == 0 + else: + return NotImplemented + + def __le__(self, other): + if isinstance(other, timedelta): + return self._cmp(other) <= 0 + else: + return NotImplemented + + def __lt__(self, other): + if isinstance(other, timedelta): + return self._cmp(other) < 0 + else: + return NotImplemented + + def __ge__(self, other): + if isinstance(other, timedelta): + return self._cmp(other) >= 0 + else: + return NotImplemented + + def __gt__(self, other): + if isinstance(other, timedelta): + return self._cmp(other) > 0 + else: + return NotImplemented + + def _cmp(self, other): + assert isinstance(other, timedelta) + return _cmp(self._getstate(), other._getstate()) + + def __hash__(self): + if self._hashcode == -1: + self._hashcode = hash(self._getstate()) + return self._hashcode + + def __bool__(self): + return (self._days != 0 or + self._seconds != 0 or + self._microseconds != 0) + + # Pickle support. + + def _getstate(self): + return (self._days, self._seconds, self._microseconds) + + def __reduce__(self): + return (self.__class__, self._getstate()) + +timedelta.min = timedelta(-999999999) +timedelta.max = timedelta(days=999999999, hours=23, minutes=59, seconds=59, + microseconds=999999) +timedelta.resolution = timedelta(microseconds=1) + +class date: + """Concrete date type. + + Constructors: + + __new__() + fromtimestamp() + today() + fromordinal() + + Operators: + + __repr__, __str__ + __eq__, __le__, __lt__, __ge__, __gt__, __hash__ + __add__, __radd__, __sub__ (add/radd only with timedelta arg) + + Methods: + + timetuple() + toordinal() + weekday() + isoweekday(), isocalendar(), isoformat() + ctime() + strftime() + + Properties (readonly): + year, month, day + """ + __slots__ = '_year', '_month', '_day', '_hashcode' + + def __new__(cls, year, month=None, day=None): + """Constructor. + + Arguments: + + year, month, day (required, base 1) + """ + if (month is None and + isinstance(year, (bytes, str)) and len(year) == 4 and + 1 <= ord(year[2:3]) <= 12): + # Pickle support + if isinstance(year, str): + try: + year = year.encode('latin1') + except UnicodeEncodeError: + # More informative error message. + raise ValueError( + "Failed to encode latin1 string when unpickling " + "a date object. " + "pickle.load(data, encoding='latin1') is assumed.") + self = object.__new__(cls) + self.__setstate(year) + self._hashcode = -1 + return self + year, month, day = _check_date_fields(year, month, day) + self = object.__new__(cls) + self._year = year + self._month = month + self._day = day + self._hashcode = -1 + return self + + # Additional constructors + + @classmethod + def fromtimestamp(cls, t): + "Construct a date from a POSIX timestamp (like time.time())." 
+ y, m, d, hh, mm, ss, weekday, jday, dst = _time.localtime(t) + return cls(y, m, d) + + @classmethod + def today(cls): + "Construct a date from time.time()." + t = _time.time() + return cls.fromtimestamp(t) + + @classmethod + def fromordinal(cls, n): + """Construct a date from a proleptic Gregorian ordinal. + + January 1 of year 1 is day 1. Only the year, month and day are + non-zero in the result. + """ + y, m, d = _ord2ymd(n) + return cls(y, m, d) + + @classmethod + def fromisoformat(cls, date_string): + """Construct a date from a string in ISO 8601 format.""" + if not isinstance(date_string, str): + raise TypeError('fromisoformat: argument must be str') + + if len(date_string) not in (7, 8, 10): + raise ValueError(f'Invalid isoformat string: {date_string!r}') + + try: + return cls(*_parse_isoformat_date(date_string)) + except Exception: + raise ValueError(f'Invalid isoformat string: {date_string!r}') + + @classmethod + def fromisocalendar(cls, year, week, day): + """Construct a date from the ISO year, week number and weekday. + + This is the inverse of the date.isocalendar() function""" + return cls(*_isoweek_to_gregorian(year, week, day)) + + # Conversions to string + + def __repr__(self): + """Convert to formal string, for repr(). + + >>> dt = datetime(2010, 1, 1) + >>> repr(dt) + 'datetime.datetime(2010, 1, 1, 0, 0)' + + >>> dt = datetime(2010, 1, 1, tzinfo=timezone.utc) + >>> repr(dt) + 'datetime.datetime(2010, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)' + """ + return "%s.%s(%d, %d, %d)" % (_get_class_module(self), + self.__class__.__qualname__, + self._year, + self._month, + self._day) + # XXX These shouldn't depend on time.localtime(), because that + # clips the usable dates to [1970 .. 2038). At least ctime() is + # easily done without using strftime() -- that's better too because + # strftime("%c", ...) is locale specific. + + + def ctime(self): + "Return ctime() style string." + weekday = self.toordinal() % 7 or 7 + return "%s %s %2d 00:00:00 %04d" % ( + _DAYNAMES[weekday], + _MONTHNAMES[self._month], + self._day, self._year) + + def strftime(self, format): + """ + Format using strftime(). + + Example: "%d/%m/%Y, %H:%M:%S" + """ + return _wrap_strftime(self, format, self.timetuple()) + + def __format__(self, fmt): + if not isinstance(fmt, str): + raise TypeError("must be str, not %s" % type(fmt).__name__) + if len(fmt) != 0: + return self.strftime(fmt) + return str(self) + + def isoformat(self): + """Return the date formatted according to ISO. + + This is 'YYYY-MM-DD'. + + References: + - http://www.w3.org/TR/NOTE-datetime + - http://www.cl.cam.ac.uk/~mgk25/iso-time.html + """ + return "%04d-%02d-%02d" % (self._year, self._month, self._day) + + __str__ = isoformat + + # Read-only field accessors + @property + def year(self): + """year (1-9999)""" + return self._year + + @property + def month(self): + """month (1-12)""" + return self._month + + @property + def day(self): + """day (1-31)""" + return self._day + + # Standard conversions, __eq__, __le__, __lt__, __ge__, __gt__, + # __hash__ (and helpers) + + def timetuple(self): + "Return local time tuple compatible with time.localtime()." + return _build_struct_time(self._year, self._month, self._day, + 0, 0, 0, -1) + + def toordinal(self): + """Return proleptic Gregorian ordinal for the year, month and day. + + January 1 of year 1 is day 1. Only the year, month and day values + contribute to the result. 
+ """ + return _ymd2ord(self._year, self._month, self._day) + + def replace(self, year=None, month=None, day=None): + """Return a new date with new values for the specified fields.""" + if year is None: + year = self._year + if month is None: + month = self._month + if day is None: + day = self._day + return type(self)(year, month, day) + + # Comparisons of date objects with other. + + def __eq__(self, other): + if isinstance(other, date): + return self._cmp(other) == 0 + return NotImplemented + + def __le__(self, other): + if isinstance(other, date): + return self._cmp(other) <= 0 + return NotImplemented + + def __lt__(self, other): + if isinstance(other, date): + return self._cmp(other) < 0 + return NotImplemented + + def __ge__(self, other): + if isinstance(other, date): + return self._cmp(other) >= 0 + return NotImplemented + + def __gt__(self, other): + if isinstance(other, date): + return self._cmp(other) > 0 + return NotImplemented + + def _cmp(self, other): + assert isinstance(other, date) + y, m, d = self._year, self._month, self._day + y2, m2, d2 = other._year, other._month, other._day + return _cmp((y, m, d), (y2, m2, d2)) + + def __hash__(self): + "Hash." + if self._hashcode == -1: + self._hashcode = hash(self._getstate()) + return self._hashcode + + # Computations + + def __add__(self, other): + "Add a date to a timedelta." + if isinstance(other, timedelta): + o = self.toordinal() + other.days + if 0 < o <= _MAXORDINAL: + return type(self).fromordinal(o) + raise OverflowError("result out of range") + return NotImplemented + + __radd__ = __add__ + + def __sub__(self, other): + """Subtract two dates, or a date and a timedelta.""" + if isinstance(other, timedelta): + return self + timedelta(-other.days) + if isinstance(other, date): + days1 = self.toordinal() + days2 = other.toordinal() + return timedelta(days1 - days2) + return NotImplemented + + def weekday(self): + "Return day of the week, where Monday == 0 ... Sunday == 6." + return (self.toordinal() + 6) % 7 + + # Day-of-the-week and week-of-the-year, according to ISO + + def isoweekday(self): + "Return day of the week, where Monday == 1 ... Sunday == 7." + # 1-Jan-0001 is a Monday + return self.toordinal() % 7 or 7 + + def isocalendar(self): + """Return a named tuple containing ISO year, week number, and weekday. + + The first ISO week of the year is the (Mon-Sun) week + containing the year's first Thursday; everything else derives + from that. + + The first week is 1; Monday is 1 ... Sunday is 7. + + ISO calendar algorithm taken from + http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm + (used with permission) + """ + year = self._year + week1monday = _isoweek1monday(year) + today = _ymd2ord(self._year, self._month, self._day) + # Internally, week and day have origin 0 + week, day = divmod(today - week1monday, 7) + if week < 0: + year -= 1 + week1monday = _isoweek1monday(year) + week, day = divmod(today - week1monday, 7) + elif week >= 52: + if today >= _isoweek1monday(year+1): + year += 1 + week = 0 + return _IsoCalendarDate(year, week+1, day+1) + + # Pickle support. 
+ + def _getstate(self): + yhi, ylo = divmod(self._year, 256) + return bytes([yhi, ylo, self._month, self._day]), + + def __setstate(self, string): + yhi, ylo, self._month, self._day = string + self._year = yhi * 256 + ylo + + def __reduce__(self): + return (self.__class__, self._getstate()) + +_date_class = date # so functions w/ args named "date" can get at the class + +date.min = date(1, 1, 1) +date.max = date(9999, 12, 31) +date.resolution = timedelta(days=1) + + +class tzinfo: + """Abstract base class for time zone info classes. + + Subclasses must override the name(), utcoffset() and dst() methods. + """ + __slots__ = () + + def tzname(self, dt): + "datetime -> string name of time zone." + raise NotImplementedError("tzinfo subclass must override tzname()") + + def utcoffset(self, dt): + "datetime -> timedelta, positive for east of UTC, negative for west of UTC" + raise NotImplementedError("tzinfo subclass must override utcoffset()") + + def dst(self, dt): + """datetime -> DST offset as timedelta, positive for east of UTC. + + Return 0 if DST not in effect. utcoffset() must include the DST + offset. + """ + raise NotImplementedError("tzinfo subclass must override dst()") + + def fromutc(self, dt): + "datetime in UTC -> datetime in local time." + + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # See the long comment block at the end of this file for an + # explanation of this algorithm. + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + if delta: + dt += delta + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + # Pickle support. + + def __reduce__(self): + getinitargs = getattr(self, "__getinitargs__", None) + if getinitargs: + args = getinitargs() + else: + args = () + return (self.__class__, args, self.__getstate__()) + + +class IsoCalendarDate(tuple): + + def __new__(cls, year, week, weekday, /): + return super().__new__(cls, (year, week, weekday)) + + @property + def year(self): + return self[0] + + @property + def week(self): + return self[1] + + @property + def weekday(self): + return self[2] + + def __reduce__(self): + # This code is intended to pickle the object without making the + # class public. See https://bugs.python.org/msg352381 + return (tuple, (tuple(self),)) + + def __repr__(self): + return (f'{self.__class__.__name__}' + f'(year={self[0]}, week={self[1]}, weekday={self[2]})') + + +_IsoCalendarDate = IsoCalendarDate +del IsoCalendarDate +_tzinfo_class = tzinfo + +class time: + """Time with time zone. + + Constructors: + + __new__() + + Operators: + + __repr__, __str__ + __eq__, __le__, __lt__, __ge__, __gt__, __hash__ + + Methods: + + strftime() + isoformat() + utcoffset() + tzname() + dst() + + Properties (readonly): + hour, minute, second, microsecond, tzinfo, fold + """ + __slots__ = '_hour', '_minute', '_second', '_microsecond', '_tzinfo', '_hashcode', '_fold' + + def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *, fold=0): + """Constructor. 
+ + Arguments: + + hour, minute (required) + second, microsecond (default to zero) + tzinfo (default to None) + fold (keyword only, default to zero) + """ + if (isinstance(hour, (bytes, str)) and len(hour) == 6 and + ord(hour[0:1])&0x7F < 24): + # Pickle support + if isinstance(hour, str): + try: + hour = hour.encode('latin1') + except UnicodeEncodeError: + # More informative error message. + raise ValueError( + "Failed to encode latin1 string when unpickling " + "a time object. " + "pickle.load(data, encoding='latin1') is assumed.") + self = object.__new__(cls) + self.__setstate(hour, minute or None) + self._hashcode = -1 + return self + hour, minute, second, microsecond, fold = _check_time_fields( + hour, minute, second, microsecond, fold) + _check_tzinfo_arg(tzinfo) + self = object.__new__(cls) + self._hour = hour + self._minute = minute + self._second = second + self._microsecond = microsecond + self._tzinfo = tzinfo + self._hashcode = -1 + self._fold = fold + return self + + # Read-only field accessors + @property + def hour(self): + """hour (0-23)""" + return self._hour + + @property + def minute(self): + """minute (0-59)""" + return self._minute + + @property + def second(self): + """second (0-59)""" + return self._second + + @property + def microsecond(self): + """microsecond (0-999999)""" + return self._microsecond + + @property + def tzinfo(self): + """timezone info object""" + return self._tzinfo + + @property + def fold(self): + return self._fold + + # Standard conversions, __hash__ (and helpers) + + # Comparisons of time objects with other. + + def __eq__(self, other): + if isinstance(other, time): + return self._cmp(other, allow_mixed=True) == 0 + else: + return NotImplemented + + def __le__(self, other): + if isinstance(other, time): + return self._cmp(other) <= 0 + else: + return NotImplemented + + def __lt__(self, other): + if isinstance(other, time): + return self._cmp(other) < 0 + else: + return NotImplemented + + def __ge__(self, other): + if isinstance(other, time): + return self._cmp(other) >= 0 + else: + return NotImplemented + + def __gt__(self, other): + if isinstance(other, time): + return self._cmp(other) > 0 + else: + return NotImplemented + + def _cmp(self, other, allow_mixed=False): + assert isinstance(other, time) + mytz = self._tzinfo + ottz = other._tzinfo + myoff = otoff = None + + if mytz is ottz: + base_compare = True + else: + myoff = self.utcoffset() + otoff = other.utcoffset() + base_compare = myoff == otoff + + if base_compare: + return _cmp((self._hour, self._minute, self._second, + self._microsecond), + (other._hour, other._minute, other._second, + other._microsecond)) + if myoff is None or otoff is None: + if allow_mixed: + return 2 # arbitrary non-zero value + else: + raise TypeError("cannot compare naive and aware times") + myhhmm = self._hour * 60 + self._minute - myoff//timedelta(minutes=1) + othhmm = other._hour * 60 + other._minute - otoff//timedelta(minutes=1) + return _cmp((myhhmm, self._second, self._microsecond), + (othhmm, other._second, other._microsecond)) + + def __hash__(self): + """Hash.""" + if self._hashcode == -1: + if self.fold: + t = self.replace(fold=0) + else: + t = self + tzoff = t.utcoffset() + if not tzoff: # zero or None + self._hashcode = hash(t._getstate()[0]) + else: + h, m = divmod(timedelta(hours=self.hour, minutes=self.minute) - tzoff, + timedelta(hours=1)) + assert not m % timedelta(minutes=1), "whole minute" + m //= timedelta(minutes=1) + if 0 <= h < 24: + self._hashcode = hash(time(h, m, self.second, 
self.microsecond)) + else: + self._hashcode = hash((h, m, self.second, self.microsecond)) + return self._hashcode + + # Conversion to string + + def _tzstr(self): + """Return formatted timezone offset (+xx:xx) or an empty string.""" + off = self.utcoffset() + return _format_offset(off) + + def __repr__(self): + """Convert to formal string, for repr().""" + if self._microsecond != 0: + s = ", %d, %d" % (self._second, self._microsecond) + elif self._second != 0: + s = ", %d" % self._second + else: + s = "" + s= "%s.%s(%d, %d%s)" % (_get_class_module(self), + self.__class__.__qualname__, + self._hour, self._minute, s) + if self._tzinfo is not None: + assert s[-1:] == ")" + s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")" + if self._fold: + assert s[-1:] == ")" + s = s[:-1] + ", fold=1)" + return s + + def isoformat(self, timespec='auto'): + """Return the time formatted according to ISO. + + The full format is 'HH:MM:SS.mmmmmm+zz:zz'. By default, the fractional + part is omitted if self.microsecond == 0. + + The optional argument timespec specifies the number of additional + terms of the time to include. Valid options are 'auto', 'hours', + 'minutes', 'seconds', 'milliseconds' and 'microseconds'. + """ + s = _format_time(self._hour, self._minute, self._second, + self._microsecond, timespec) + tz = self._tzstr() + if tz: + s += tz + return s + + __str__ = isoformat + + @classmethod + def fromisoformat(cls, time_string): + """Construct a time from a string in one of the ISO 8601 formats.""" + if not isinstance(time_string, str): + raise TypeError('fromisoformat: argument must be str') + + # The spec actually requires that time-only ISO 8601 strings start with + # T, but the extended format allows this to be omitted as long as there + # is no ambiguity with date strings. + time_string = time_string.removeprefix('T') + + try: + return cls(*_parse_isoformat_time(time_string)) + except Exception: + raise ValueError(f'Invalid isoformat string: {time_string!r}') + + def strftime(self, format): + """Format using strftime(). The date part of the timestamp passed + to underlying strftime should not be used. + """ + # The year must be >= 1000 else Python's strftime implementation + # can raise a bogus exception. + timetuple = (1900, 1, 1, + self._hour, self._minute, self._second, + 0, 1, -1) + return _wrap_strftime(self, format, timetuple) + + def __format__(self, fmt): + if not isinstance(fmt, str): + raise TypeError("must be str, not %s" % type(fmt).__name__) + if len(fmt) != 0: + return self.strftime(fmt) + return str(self) + + # Timezone functions + + def utcoffset(self): + """Return the timezone offset as timedelta, positive east of UTC + (negative west of UTC).""" + if self._tzinfo is None: + return None + offset = self._tzinfo.utcoffset(None) + _check_utc_offset("utcoffset", offset) + return offset + + def tzname(self): + """Return the timezone name. + + Note that the name is 100% informational -- there's no requirement that + it mean anything in particular. For example, "GMT", "UTC", "-500", + "-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies. + """ + if self._tzinfo is None: + return None + name = self._tzinfo.tzname(None) + _check_tzname(name) + return name + + def dst(self): + """Return 0 if DST is not in effect, or the DST offset (as timedelta + positive eastward) if DST is in effect. 
+ + This is purely informational; the DST offset has already been added to + the UTC offset returned by utcoffset() if applicable, so there's no + need to consult dst() unless you're interested in displaying the DST + info. + """ + if self._tzinfo is None: + return None + offset = self._tzinfo.dst(None) + _check_utc_offset("dst", offset) + return offset + + def replace(self, hour=None, minute=None, second=None, microsecond=None, + tzinfo=True, *, fold=None): + """Return a new time with new values for the specified fields.""" + if hour is None: + hour = self.hour + if minute is None: + minute = self.minute + if second is None: + second = self.second + if microsecond is None: + microsecond = self.microsecond + if tzinfo is True: + tzinfo = self.tzinfo + if fold is None: + fold = self._fold + return type(self)(hour, minute, second, microsecond, tzinfo, fold=fold) + + # Pickle support. + + def _getstate(self, protocol=3): + us2, us3 = divmod(self._microsecond, 256) + us1, us2 = divmod(us2, 256) + h = self._hour + if self._fold and protocol > 3: + h += 128 + basestate = bytes([h, self._minute, self._second, + us1, us2, us3]) + if self._tzinfo is None: + return (basestate,) + else: + return (basestate, self._tzinfo) + + def __setstate(self, string, tzinfo): + if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class): + raise TypeError("bad tzinfo state arg") + h, self._minute, self._second, us1, us2, us3 = string + if h > 127: + self._fold = 1 + self._hour = h - 128 + else: + self._fold = 0 + self._hour = h + self._microsecond = (((us1 << 8) | us2) << 8) | us3 + self._tzinfo = tzinfo + + def __reduce_ex__(self, protocol): + return (self.__class__, self._getstate(protocol)) + + def __reduce__(self): + return self.__reduce_ex__(2) + +_time_class = time # so functions w/ args named "time" can get at the class + +time.min = time(0, 0, 0) +time.max = time(23, 59, 59, 999999) +time.resolution = timedelta(microseconds=1) + + +class datetime(date): + """datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]]) + + The year, month and day arguments are required. tzinfo may be None, or an + instance of a tzinfo subclass. The remaining arguments may be ints. + """ + __slots__ = date.__slots__ + time.__slots__ + + def __new__(cls, year, month=None, day=None, hour=0, minute=0, second=0, + microsecond=0, tzinfo=None, *, fold=0): + if (isinstance(year, (bytes, str)) and len(year) == 10 and + 1 <= ord(year[2:3])&0x7F <= 12): + # Pickle support + if isinstance(year, str): + try: + year = bytes(year, 'latin1') + except UnicodeEncodeError: + # More informative error message. + raise ValueError( + "Failed to encode latin1 string when unpickling " + "a datetime object. 
" + "pickle.load(data, encoding='latin1') is assumed.") + self = object.__new__(cls) + self.__setstate(year, month) + self._hashcode = -1 + return self + year, month, day = _check_date_fields(year, month, day) + hour, minute, second, microsecond, fold = _check_time_fields( + hour, minute, second, microsecond, fold) + _check_tzinfo_arg(tzinfo) + self = object.__new__(cls) + self._year = year + self._month = month + self._day = day + self._hour = hour + self._minute = minute + self._second = second + self._microsecond = microsecond + self._tzinfo = tzinfo + self._hashcode = -1 + self._fold = fold + return self + + # Read-only field accessors + @property + def hour(self): + """hour (0-23)""" + return self._hour + + @property + def minute(self): + """minute (0-59)""" + return self._minute + + @property + def second(self): + """second (0-59)""" + return self._second + + @property + def microsecond(self): + """microsecond (0-999999)""" + return self._microsecond + + @property + def tzinfo(self): + """timezone info object""" + return self._tzinfo + + @property + def fold(self): + return self._fold + + @classmethod + def _fromtimestamp(cls, t, utc, tz): + """Construct a datetime from a POSIX timestamp (like time.time()). + + A timezone info object may be passed in as well. + """ + frac, t = _math.modf(t) + us = round(frac * 1e6) + if us >= 1000000: + t += 1 + us -= 1000000 + elif us < 0: + t -= 1 + us += 1000000 + + converter = _time.gmtime if utc else _time.localtime + y, m, d, hh, mm, ss, weekday, jday, dst = converter(t) + ss = min(ss, 59) # clamp out leap seconds if the platform has them + result = cls(y, m, d, hh, mm, ss, us, tz) + if tz is None and not utc: + # As of version 2015f max fold in IANA database is + # 23 hours at 1969-09-30 13:00:00 in Kwajalein. + # Let's probe 24 hours in the past to detect a transition: + max_fold_seconds = 24 * 3600 + + # On Windows localtime_s throws an OSError for negative values, + # thus we can't perform fold detection for values of time less + # than the max time fold. See comments in _datetimemodule's + # version of this method for more details. + if t < max_fold_seconds and sys.platform.startswith("win"): + return result + + y, m, d, hh, mm, ss = converter(t - max_fold_seconds)[:6] + probe1 = cls(y, m, d, hh, mm, ss, us, tz) + trans = result - probe1 - timedelta(0, max_fold_seconds) + if trans.days < 0: + y, m, d, hh, mm, ss = converter(t + trans // timedelta(0, 1))[:6] + probe2 = cls(y, m, d, hh, mm, ss, us, tz) + if probe2 == result: + result._fold = 1 + elif tz is not None: + result = tz.fromutc(result) + return result + + @classmethod + def fromtimestamp(cls, timestamp, tz=None): + """Construct a datetime from a POSIX timestamp (like time.time()). + + A timezone info object may be passed in as well. + """ + _check_tzinfo_arg(tz) + + return cls._fromtimestamp(timestamp, tz is not None, tz) + + @classmethod + def utcfromtimestamp(cls, t): + """Construct a naive UTC datetime from a POSIX timestamp.""" + import warnings + warnings.warn("datetime.utcfromtimestamp() is deprecated and scheduled " + "for removal in a future version. Use timezone-aware " + "objects to represent datetimes in UTC: " + "datetime.fromtimestamp(t, datetime.UTC).", + DeprecationWarning, + stacklevel=2) + return cls._fromtimestamp(t, True, None) + + @classmethod + def now(cls, tz=None): + "Construct a datetime from time.time() and optional time zone info." 
+ t = _time.time() + return cls.fromtimestamp(t, tz) + + @classmethod + def utcnow(cls): + "Construct a UTC datetime from time.time()." + import warnings + warnings.warn("datetime.utcnow() is deprecated and scheduled for " + "removal in a future version. Instead, Use timezone-aware " + "objects to represent datetimes in UTC: " + "datetime.now(datetime.UTC).", + DeprecationWarning, + stacklevel=2) + t = _time.time() + return cls._fromtimestamp(t, True, None) + + @classmethod + def combine(cls, date, time, tzinfo=True): + "Construct a datetime from a given date and a given time." + if not isinstance(date, _date_class): + raise TypeError("date argument must be a date instance") + if not isinstance(time, _time_class): + raise TypeError("time argument must be a time instance") + if tzinfo is True: + tzinfo = time.tzinfo + return cls(date.year, date.month, date.day, + time.hour, time.minute, time.second, time.microsecond, + tzinfo, fold=time.fold) + + @classmethod + def fromisoformat(cls, date_string): + """Construct a datetime from a string in one of the ISO 8601 formats.""" + if not isinstance(date_string, str): + raise TypeError('fromisoformat: argument must be str') + + if len(date_string) < 7: + raise ValueError(f'Invalid isoformat string: {date_string!r}') + + # Split this at the separator + try: + separator_location = _find_isoformat_datetime_separator(date_string) + dstr = date_string[0:separator_location] + tstr = date_string[(separator_location+1):] + + date_components = _parse_isoformat_date(dstr) + except ValueError: + raise ValueError( + f'Invalid isoformat string: {date_string!r}') from None + + if tstr: + try: + time_components = _parse_isoformat_time(tstr) + except ValueError: + raise ValueError( + f'Invalid isoformat string: {date_string!r}') from None + else: + time_components = [0, 0, 0, 0, None] + + return cls(*(date_components + time_components)) + + def timetuple(self): + "Return local time tuple compatible with time.localtime()." + dst = self.dst() + if dst is None: + dst = -1 + elif dst: + dst = 1 + else: + dst = 0 + return _build_struct_time(self.year, self.month, self.day, + self.hour, self.minute, self.second, + dst) + + def _mktime(self): + """Return integer POSIX timestamp.""" + epoch = datetime(1970, 1, 1) + max_fold_seconds = 24 * 3600 + t = (self - epoch) // timedelta(0, 1) + def local(u): + y, m, d, hh, mm, ss = _time.localtime(u)[:6] + return (datetime(y, m, d, hh, mm, ss) - epoch) // timedelta(0, 1) + + # Our goal is to solve t = local(u) for u. + a = local(t) - t + u1 = t - a + t1 = local(u1) + if t1 == t: + # We found one solution, but it may not be the one we need. + # Look for an earlier solution (if `fold` is 0), or a + # later one (if `fold` is 1). + u2 = u1 + (-max_fold_seconds, max_fold_seconds)[self.fold] + b = local(u2) - u2 + if a == b: + return u1 + else: + b = t1 - u1 + assert a != b + u2 = t - b + t2 = local(u2) + if t2 == t: + return u2 + if t1 == t: + return u1 + # We have found both offsets a and b, but neither t - a nor t - b is + # a solution. This means t is in the gap. + return (max, min)[self.fold](u1, u2) + + + def timestamp(self): + "Return POSIX timestamp as float" + if self._tzinfo is None: + s = self._mktime() + return s + self.microsecond / 1e6 + else: + return (self - _EPOCH).total_seconds() + + def utctimetuple(self): + "Return UTC time tuple compatible with time.gmtime()." 
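+        # Sketch of the behaviour implemented below: an aware datetime is
+        # first shifted by its utcoffset() so the fields are expressed in
+        # UTC; the resulting struct_time always reports tm_isdst=0.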
+ offset = self.utcoffset() + if offset: + self -= offset + y, m, d = self.year, self.month, self.day + hh, mm, ss = self.hour, self.minute, self.second + return _build_struct_time(y, m, d, hh, mm, ss, 0) + + def date(self): + "Return the date part." + return date(self._year, self._month, self._day) + + def time(self): + "Return the time part, with tzinfo None." + return time(self.hour, self.minute, self.second, self.microsecond, fold=self.fold) + + def timetz(self): + "Return the time part, with same tzinfo." + return time(self.hour, self.minute, self.second, self.microsecond, + self._tzinfo, fold=self.fold) + + def replace(self, year=None, month=None, day=None, hour=None, + minute=None, second=None, microsecond=None, tzinfo=True, + *, fold=None): + """Return a new datetime with new values for the specified fields.""" + if year is None: + year = self.year + if month is None: + month = self.month + if day is None: + day = self.day + if hour is None: + hour = self.hour + if minute is None: + minute = self.minute + if second is None: + second = self.second + if microsecond is None: + microsecond = self.microsecond + if tzinfo is True: + tzinfo = self.tzinfo + if fold is None: + fold = self.fold + return type(self)(year, month, day, hour, minute, second, + microsecond, tzinfo, fold=fold) + + def _local_timezone(self): + if self.tzinfo is None: + ts = self._mktime() + # Detect gap + ts2 = self.replace(fold=1-self.fold)._mktime() + if ts2 != ts: # This happens in a gap or a fold + if (ts2 > ts) == self.fold: + ts = ts2 + else: + ts = (self - _EPOCH) // timedelta(seconds=1) + localtm = _time.localtime(ts) + local = datetime(*localtm[:6]) + # Extract TZ data + gmtoff = localtm.tm_gmtoff + zone = localtm.tm_zone + return timezone(timedelta(seconds=gmtoff), zone) + + def astimezone(self, tz=None): + if tz is None: + tz = self._local_timezone() + elif not isinstance(tz, tzinfo): + raise TypeError("tz argument must be an instance of tzinfo") + + mytz = self.tzinfo + if mytz is None: + mytz = self._local_timezone() + myoffset = mytz.utcoffset(self) + else: + myoffset = mytz.utcoffset(self) + if myoffset is None: + mytz = self.replace(tzinfo=None)._local_timezone() + myoffset = mytz.utcoffset(self) + + if tz is mytz: + return self + + # Convert self to UTC, and attach the new time zone object. + utc = (self - myoffset).replace(tzinfo=tz) + + # Convert from UTC to tz's local time. + return tz.fromutc(utc) + + # Ways to produce a string. + + def ctime(self): + "Return ctime() style string." + weekday = self.toordinal() % 7 or 7 + return "%s %s %2d %02d:%02d:%02d %04d" % ( + _DAYNAMES[weekday], + _MONTHNAMES[self._month], + self._day, + self._hour, self._minute, self._second, + self._year) + + def isoformat(self, sep='T', timespec='auto'): + """Return the time formatted according to ISO. + + The full format looks like 'YYYY-MM-DD HH:MM:SS.mmmmmm'. + By default, the fractional part is omitted if self.microsecond == 0. + + If self.tzinfo is not None, the UTC offset is also attached, giving + giving a full format of 'YYYY-MM-DD HH:MM:SS.mmmmmm+HH:MM'. + + Optional argument sep specifies the separator between date and + time, default 'T'. + + The optional argument timespec specifies the number of additional + terms of the time to include. Valid options are 'auto', 'hours', + 'minutes', 'seconds', 'milliseconds' and 'microseconds'. 
+ """ + s = ("%04d-%02d-%02d%c" % (self._year, self._month, self._day, sep) + + _format_time(self._hour, self._minute, self._second, + self._microsecond, timespec)) + + off = self.utcoffset() + tz = _format_offset(off) + if tz: + s += tz + + return s + + def __repr__(self): + """Convert to formal string, for repr().""" + L = [self._year, self._month, self._day, # These are never zero + self._hour, self._minute, self._second, self._microsecond] + if L[-1] == 0: + del L[-1] + if L[-1] == 0: + del L[-1] + s = "%s.%s(%s)" % (_get_class_module(self), + self.__class__.__qualname__, + ", ".join(map(str, L))) + if self._tzinfo is not None: + assert s[-1:] == ")" + s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")" + if self._fold: + assert s[-1:] == ")" + s = s[:-1] + ", fold=1)" + return s + + def __str__(self): + "Convert to string, for str()." + return self.isoformat(sep=' ') + + @classmethod + def strptime(cls, date_string, format): + 'string, format -> new datetime parsed from a string (like time.strptime()).' + import _strptime + return _strptime._strptime_datetime(cls, date_string, format) + + def utcoffset(self): + """Return the timezone offset as timedelta positive east of UTC (negative west of + UTC).""" + if self._tzinfo is None: + return None + offset = self._tzinfo.utcoffset(self) + _check_utc_offset("utcoffset", offset) + return offset + + def tzname(self): + """Return the timezone name. + + Note that the name is 100% informational -- there's no requirement that + it mean anything in particular. For example, "GMT", "UTC", "-500", + "-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies. + """ + if self._tzinfo is None: + return None + name = self._tzinfo.tzname(self) + _check_tzname(name) + return name + + def dst(self): + """Return 0 if DST is not in effect, or the DST offset (as timedelta + positive eastward) if DST is in effect. + + This is purely informational; the DST offset has already been added to + the UTC offset returned by utcoffset() if applicable, so there's no + need to consult dst() unless you're interested in displaying the DST + info. + """ + if self._tzinfo is None: + return None + offset = self._tzinfo.dst(self) + _check_utc_offset("dst", offset) + return offset + + # Comparisons of datetime objects with other. 
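+    #
+    # A short illustrative summary (the values below are made up): equality
+    # between a naive and an aware datetime is allowed and simply evaluates
+    # false, e.g. datetime(2002, 1, 31) == datetime(2002, 1, 31,
+    # tzinfo=timezone.utc) is False, while ordering them with <, <=, > or >=
+    # raises TypeError.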
+ + def __eq__(self, other): + if isinstance(other, datetime): + return self._cmp(other, allow_mixed=True) == 0 + elif not isinstance(other, date): + return NotImplemented + else: + return False + + def __le__(self, other): + if isinstance(other, datetime): + return self._cmp(other) <= 0 + elif not isinstance(other, date): + return NotImplemented + else: + _cmperror(self, other) + + def __lt__(self, other): + if isinstance(other, datetime): + return self._cmp(other) < 0 + elif not isinstance(other, date): + return NotImplemented + else: + _cmperror(self, other) + + def __ge__(self, other): + if isinstance(other, datetime): + return self._cmp(other) >= 0 + elif not isinstance(other, date): + return NotImplemented + else: + _cmperror(self, other) + + def __gt__(self, other): + if isinstance(other, datetime): + return self._cmp(other) > 0 + elif not isinstance(other, date): + return NotImplemented + else: + _cmperror(self, other) + + def _cmp(self, other, allow_mixed=False): + assert isinstance(other, datetime) + mytz = self._tzinfo + ottz = other._tzinfo + myoff = otoff = None + + if mytz is ottz: + base_compare = True + else: + myoff = self.utcoffset() + otoff = other.utcoffset() + # Assume that allow_mixed means that we are called from __eq__ + if allow_mixed: + if myoff != self.replace(fold=not self.fold).utcoffset(): + return 2 + if otoff != other.replace(fold=not other.fold).utcoffset(): + return 2 + base_compare = myoff == otoff + + if base_compare: + return _cmp((self._year, self._month, self._day, + self._hour, self._minute, self._second, + self._microsecond), + (other._year, other._month, other._day, + other._hour, other._minute, other._second, + other._microsecond)) + if myoff is None or otoff is None: + if allow_mixed: + return 2 # arbitrary non-zero value + else: + raise TypeError("cannot compare naive and aware datetimes") + # XXX What follows could be done more efficiently... + diff = self - other # this will take offsets into account + if diff.days < 0: + return -1 + return diff and 1 or 0 + + def __add__(self, other): + "Add a datetime and a timedelta." + if not isinstance(other, timedelta): + return NotImplemented + delta = timedelta(self.toordinal(), + hours=self._hour, + minutes=self._minute, + seconds=self._second, + microseconds=self._microsecond) + delta += other + hour, rem = divmod(delta.seconds, 3600) + minute, second = divmod(rem, 60) + if 0 < delta.days <= _MAXORDINAL: + return type(self).combine(date.fromordinal(delta.days), + time(hour, minute, second, + delta.microseconds, + tzinfo=self._tzinfo)) + raise OverflowError("result out of range") + + __radd__ = __add__ + + def __sub__(self, other): + "Subtract two datetimes, or a datetime and a timedelta." 
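+        # Rough outline of what follows: datetime - timedelta is delegated to
+        # __add__ with the delta negated; datetime - datetime yields a
+        # timedelta, taking UTC offsets into account when both operands are
+        # aware; mixing a naive and an aware datetime raises TypeError.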
+ if not isinstance(other, datetime): + if isinstance(other, timedelta): + return self + -other + return NotImplemented + + days1 = self.toordinal() + days2 = other.toordinal() + secs1 = self._second + self._minute * 60 + self._hour * 3600 + secs2 = other._second + other._minute * 60 + other._hour * 3600 + base = timedelta(days1 - days2, + secs1 - secs2, + self._microsecond - other._microsecond) + if self._tzinfo is other._tzinfo: + return base + myoff = self.utcoffset() + otoff = other.utcoffset() + if myoff == otoff: + return base + if myoff is None or otoff is None: + raise TypeError("cannot mix naive and timezone-aware time") + return base + otoff - myoff + + def __hash__(self): + if self._hashcode == -1: + if self.fold: + t = self.replace(fold=0) + else: + t = self + tzoff = t.utcoffset() + if tzoff is None: + self._hashcode = hash(t._getstate()[0]) + else: + days = _ymd2ord(self.year, self.month, self.day) + seconds = self.hour * 3600 + self.minute * 60 + self.second + self._hashcode = hash(timedelta(days, seconds, self.microsecond) - tzoff) + return self._hashcode + + # Pickle support. + + def _getstate(self, protocol=3): + yhi, ylo = divmod(self._year, 256) + us2, us3 = divmod(self._microsecond, 256) + us1, us2 = divmod(us2, 256) + m = self._month + if self._fold and protocol > 3: + m += 128 + basestate = bytes([yhi, ylo, m, self._day, + self._hour, self._minute, self._second, + us1, us2, us3]) + if self._tzinfo is None: + return (basestate,) + else: + return (basestate, self._tzinfo) + + def __setstate(self, string, tzinfo): + if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class): + raise TypeError("bad tzinfo state arg") + (yhi, ylo, m, self._day, self._hour, + self._minute, self._second, us1, us2, us3) = string + if m > 127: + self._fold = 1 + self._month = m - 128 + else: + self._fold = 0 + self._month = m + self._year = yhi * 256 + ylo + self._microsecond = (((us1 << 8) | us2) << 8) | us3 + self._tzinfo = tzinfo + + def __reduce_ex__(self, protocol): + return (self.__class__, self._getstate(protocol)) + + def __reduce__(self): + return self.__reduce_ex__(2) + + +datetime.min = datetime(1, 1, 1) +datetime.max = datetime(9999, 12, 31, 23, 59, 59, 999999) +datetime.resolution = timedelta(microseconds=1) + + +def _isoweek1monday(year): + # Helper to calculate the day number of the Monday starting week 1 + # XXX This could be done more efficiently + THURSDAY = 3 + firstday = _ymd2ord(year, 1, 1) + firstweekday = (firstday + 6) % 7 # See weekday() above + week1monday = firstday - firstweekday + if firstweekday > THURSDAY: + week1monday += 7 + return week1monday + + +class timezone(tzinfo): + __slots__ = '_offset', '_name' + + # Sentinel value to disallow None + _Omitted = object() + def __new__(cls, offset, name=_Omitted): + if not isinstance(offset, timedelta): + raise TypeError("offset must be a timedelta") + if name is cls._Omitted: + if not offset: + return cls.utc + name = None + elif not isinstance(name, str): + raise TypeError("name must be a string") + if not cls._minoffset <= offset <= cls._maxoffset: + raise ValueError("offset must be a timedelta " + "strictly between -timedelta(hours=24) and " + "timedelta(hours=24).") + return cls._create(offset, name) + + @classmethod + def _create(cls, offset, name=None): + self = tzinfo.__new__(cls) + self._offset = offset + self._name = name + return self + + def __getinitargs__(self): + """pickle support""" + if self._name is None: + return (self._offset,) + return (self._offset, self._name) + + def __eq__(self, other): + 
if isinstance(other, timezone): + return self._offset == other._offset + return NotImplemented + + def __hash__(self): + return hash(self._offset) + + def __repr__(self): + """Convert to formal string, for repr(). + + >>> tz = timezone.utc + >>> repr(tz) + 'datetime.timezone.utc' + >>> tz = timezone(timedelta(hours=-5), 'EST') + >>> repr(tz) + "datetime.timezone(datetime.timedelta(-1, 68400), 'EST')" + """ + if self is self.utc: + return 'datetime.timezone.utc' + if self._name is None: + return "%s.%s(%r)" % (_get_class_module(self), + self.__class__.__qualname__, + self._offset) + return "%s.%s(%r, %r)" % (_get_class_module(self), + self.__class__.__qualname__, + self._offset, self._name) + + def __str__(self): + return self.tzname(None) + + def utcoffset(self, dt): + if isinstance(dt, datetime) or dt is None: + return self._offset + raise TypeError("utcoffset() argument must be a datetime instance" + " or None") + + def tzname(self, dt): + if isinstance(dt, datetime) or dt is None: + if self._name is None: + return self._name_from_offset(self._offset) + return self._name + raise TypeError("tzname() argument must be a datetime instance" + " or None") + + def dst(self, dt): + if isinstance(dt, datetime) or dt is None: + return None + raise TypeError("dst() argument must be a datetime instance" + " or None") + + def fromutc(self, dt): + if isinstance(dt, datetime): + if dt.tzinfo is not self: + raise ValueError("fromutc: dt.tzinfo " + "is not self") + return dt + self._offset + raise TypeError("fromutc() argument must be a datetime instance" + " or None") + + _maxoffset = timedelta(hours=24, microseconds=-1) + _minoffset = -_maxoffset + + @staticmethod + def _name_from_offset(delta): + if not delta: + return 'UTC' + if delta < timedelta(0): + sign = '-' + delta = -delta + else: + sign = '+' + hours, rest = divmod(delta, timedelta(hours=1)) + minutes, rest = divmod(rest, timedelta(minutes=1)) + seconds = rest.seconds + microseconds = rest.microseconds + if microseconds: + return (f'UTC{sign}{hours:02d}:{minutes:02d}:{seconds:02d}' + f'.{microseconds:06d}') + if seconds: + return f'UTC{sign}{hours:02d}:{minutes:02d}:{seconds:02d}' + return f'UTC{sign}{hours:02d}:{minutes:02d}' + +UTC = timezone.utc = timezone._create(timedelta(0)) + +# bpo-37642: These attributes are rounded to the nearest minute for backwards +# compatibility, even though the constructor will accept a wider range of +# values. This may change in the future. +timezone.min = timezone._create(-timedelta(hours=23, minutes=59)) +timezone.max = timezone._create(timedelta(hours=23, minutes=59)) +_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc) + +# Some time zone algebra. For a datetime x, let +# x.n = x stripped of its timezone -- its naive time. +# x.o = x.utcoffset(), and assuming that doesn't raise an exception or +# return None +# x.d = x.dst(), and assuming that doesn't raise an exception or +# return None +# x.s = x's standard offset, x.o - x.d +# +# Now some derived rules, where k is a duration (timedelta). +# +# 1. x.o = x.s + x.d +# This follows from the definition of x.s. +# +# 2. If x and y have the same tzinfo member, x.s = y.s. +# This is actually a requirement, an assumption we need to make about +# sane tzinfo classes. +# +# 3. The naive UTC time corresponding to x is x.n - x.o. +# This is again a requirement for a sane tzinfo class. +# +# 4. (x+k).s = x.s +# This follows from #2, and that datetime.timetz+timedelta preserves tzinfo. +# +# 5. (x+k).n = x.n + k +# Again follows from how arithmetic is defined. 
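+#
+# A quick sanity check of these rules, using made-up US-Eastern-style numbers
+# (standard offset -5 hours, DST offset +1 hour during daylight time):
+#
+#   x.s = -5h and x.d = 1h give x.o = x.s + x.d = -4h (rule 1), so for
+#   x.n = 2004-07-01 12:00 the corresponding naive UTC time is
+#   x.n - x.o = 2004-07-01 16:00 (rule 3); adding any duration k shifts x.n
+#   by exactly k (rule 5) while leaving x.s at -5h (rules 2 and 4).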
+# +# Now we can explain tz.fromutc(x). Let's assume it's an interesting case +# (meaning that the various tzinfo methods exist, and don't blow up or return +# None when called). +# +# The function wants to return a datetime y with timezone tz, equivalent to x. +# x is already in UTC. +# +# By #3, we want +# +# y.n - y.o = x.n [1] +# +# The algorithm starts by attaching tz to x.n, and calling that y. So +# x.n = y.n at the start. Then it wants to add a duration k to y, so that [1] +# becomes true; in effect, we want to solve [2] for k: +# +# (y+k).n - (y+k).o = x.n [2] +# +# By #1, this is the same as +# +# (y+k).n - ((y+k).s + (y+k).d) = x.n [3] +# +# By #5, (y+k).n = y.n + k, which equals x.n + k because x.n=y.n at the start. +# Substituting that into [3], +# +# x.n + k - (y+k).s - (y+k).d = x.n; the x.n terms cancel, leaving +# k - (y+k).s - (y+k).d = 0; rearranging, +# k = (y+k).s - (y+k).d; by #4, (y+k).s == y.s, so +# k = y.s - (y+k).d +# +# On the RHS, (y+k).d can't be computed directly, but y.s can be, and we +# approximate k by ignoring the (y+k).d term at first. Note that k can't be +# very large, since all offset-returning methods return a duration of magnitude +# less than 24 hours. For that reason, if y is firmly in std time, (y+k).d must +# be 0, so ignoring it has no consequence then. +# +# In any case, the new value is +# +# z = y + y.s [4] +# +# It's helpful to step back at look at [4] from a higher level: it's simply +# mapping from UTC to tz's standard time. +# +# At this point, if +# +# z.n - z.o = x.n [5] +# +# we have an equivalent time, and are almost done. The insecurity here is +# at the start of daylight time. Picture US Eastern for concreteness. The wall +# time jumps from 1:59 to 3:00, and wall hours of the form 2:MM don't make good +# sense then. The docs ask that an Eastern tzinfo class consider such a time to +# be EDT (because it's "after 2"), which is a redundant spelling of 1:MM EST +# on the day DST starts. We want to return the 1:MM EST spelling because that's +# the only spelling that makes sense on the local wall clock. +# +# In fact, if [5] holds at this point, we do have the standard-time spelling, +# but that takes a bit of proof. We first prove a stronger result. What's the +# difference between the LHS and RHS of [5]? Let +# +# diff = x.n - (z.n - z.o) [6] +# +# Now +# z.n = by [4] +# (y + y.s).n = by #5 +# y.n + y.s = since y.n = x.n +# x.n + y.s = since z and y are have the same tzinfo member, +# y.s = z.s by #2 +# x.n + z.s +# +# Plugging that back into [6] gives +# +# diff = +# x.n - ((x.n + z.s) - z.o) = expanding +# x.n - x.n - z.s + z.o = cancelling +# - z.s + z.o = by #2 +# z.d +# +# So diff = z.d. +# +# If [5] is true now, diff = 0, so z.d = 0 too, and we have the standard-time +# spelling we wanted in the endcase described above. We're done. Contrarily, +# if z.d = 0, then we have a UTC equivalent, and are also done. +# +# If [5] is not true now, diff = z.d != 0, and z.d is the offset we need to +# add to z (in effect, z is in tz's standard time, and we need to shift the +# local clock into tz's daylight time). +# +# Let +# +# z' = z + z.d = z + diff [7] +# +# and we can again ask whether +# +# z'.n - z'.o = x.n [8] +# +# If so, we're done. If not, the tzinfo class is insane, according to the +# assumptions we've made. This also requires a bit of proof. 
As before, let's +# compute the difference between the LHS and RHS of [8] (and skipping some of +# the justifications for the kinds of substitutions we've done several times +# already): +# +# diff' = x.n - (z'.n - z'.o) = replacing z'.n via [7] +# x.n - (z.n + diff - z'.o) = replacing diff via [6] +# x.n - (z.n + x.n - (z.n - z.o) - z'.o) = +# x.n - z.n - x.n + z.n - z.o + z'.o = cancel x.n +# - z.n + z.n - z.o + z'.o = cancel z.n +# - z.o + z'.o = #1 twice +# -z.s - z.d + z'.s + z'.d = z and z' have same tzinfo +# z'.d - z.d +# +# So z' is UTC-equivalent to x iff z'.d = z.d at this point. If they are equal, +# we've found the UTC-equivalent so are done. In fact, we stop with [7] and +# return z', not bothering to compute z'.d. +# +# How could z.d and z'd differ? z' = z + z.d [7], so merely moving z' by +# a dst() offset, and starting *from* a time already in DST (we know z.d != 0), +# would have to change the result dst() returns: we start in DST, and moving +# a little further into it takes us out of DST. +# +# There isn't a sane case where this can happen. The closest it gets is at +# the end of DST, where there's an hour in UTC with no spelling in a hybrid +# tzinfo class. In US Eastern, that's 5:MM UTC = 0:MM EST = 1:MM EDT. During +# that hour, on an Eastern clock 1:MM is taken as being in standard time (6:MM +# UTC) because the docs insist on that, but 0:MM is taken as being in daylight +# time (4:MM UTC). There is no local time mapping to 5:MM UTC. The local +# clock jumps from 1:59 back to 1:00 again, and repeats the 1:MM hour in +# standard time. Since that's what the local clock *does*, we want to map both +# UTC hours 5:MM and 6:MM to 1:MM Eastern. The result is ambiguous +# in local time, but so it goes -- it's the way the local clock works. +# +# When x = 5:MM UTC is the input to this algorithm, x.o=0, y.o=-5 and y.d=0, +# so z=0:MM. z.d=60 (minutes) then, so [5] doesn't hold and we keep going. +# z' = z + z.d = 1:MM then, and z'.d=0, and z'.d - z.d = -60 != 0 so [8] +# (correctly) concludes that z' is not UTC-equivalent to x. +# +# Because we know z.d said z was in daylight time (else [5] would have held and +# we would have stopped then), and we know z.d != z'.d (else [8] would have held +# and we have stopped then), and there are only 2 possible values dst() can +# return in Eastern, it follows that z'.d must be 0 (which it is in the example, +# but the reasoning doesn't depend on the example -- it depends on there being +# two possible dst() outcomes, one zero and the other non-zero). Therefore +# z' must be in standard time, and is the spelling we want in this case. +# +# Note again that z' is not UTC-equivalent as far as the hybrid tzinfo class is +# concerned (because it takes z' as being in standard time rather than the +# daylight time we intend here), but returning it gives the real-life "local +# clock repeats an hour" behavior when mapping the "unspellable" UTC hour into +# tz. +# +# When the input is 6:MM, z=1:MM and z.d=0, and we stop at once, again with +# the 1:MM standard time spelling we want. +# +# So how can this break? One of the assumptions must be violated. Two +# possibilities: +# +# 1) [2] effectively says that y.s is invariant across all y belong to a given +# time zone. This isn't true if, for political reasons or continental drift, +# a region decides to change its base offset from UTC. +# +# 2) There may be versions of "double daylight" time where the tail end of +# the analysis gives up a step too early. 
I haven't thought about that +# enough to say. +# +# In any case, it's clear that the default fromutc() is strong enough to handle +# "almost all" time zones: so long as the standard offset is invariant, it +# doesn't matter if daylight time transition points change from year to year, or +# if daylight time is skipped in some years; it doesn't matter how large or +# small dst() may get within its bounds; and it doesn't even matter if some +# perverse time zone returns a negative dst()). So a breaking case must be +# pretty bizarre, and a tzinfo subclass can override fromutc() if it is. diff --git a/Lib/argparse.py b/Lib/argparse.py index a819d2650e85f0..f5f44ff02c0d38 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -345,21 +345,22 @@ def _format_usage(self, usage, actions, groups, prefix): def get_lines(parts, indent, prefix=None): lines = [] line = [] + indent_length = len(indent) if prefix is not None: line_len = len(prefix) - 1 else: - line_len = len(indent) - 1 + line_len = indent_length - 1 for part in parts: if line_len + 1 + len(part) > text_width and line: lines.append(indent + ' '.join(line)) line = [] - line_len = len(indent) - 1 + line_len = indent_length - 1 line.append(part) line_len += len(part) + 1 if line: lines.append(indent + ' '.join(line)) if prefix is not None: - lines[0] = lines[0][len(indent):] + lines[0] = lines[0][indent_length:] return lines # if prog is short, follow it with optionals or positionals @@ -2605,9 +2606,11 @@ def print_help(self, file=None): def _print_message(self, message, file=None): if message: - if file is None: - file = _sys.stderr - file.write(message) + file = file or _sys.stderr + try: + file.write(message) + except (AttributeError, OSError): + pass # =============== # Exiting methods diff --git a/Lib/ast.py b/Lib/ast.py index d9733a79d3a78f..65152047a22370 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -294,9 +294,7 @@ def get_docstring(node, clean=True): if not(node.body and isinstance(node.body[0], Expr)): return None node = node.body[0].value - if isinstance(node, Str): - text = node.s - elif isinstance(node, Constant) and isinstance(node.value, str): + if isinstance(node, Constant) and isinstance(node.value, str): text = node.value else: return None @@ -499,20 +497,52 @@ def generic_visit(self, node): return node +_DEPRECATED_VALUE_ALIAS_MESSAGE = ( + "{name} is deprecated and will be removed in Python {remove}; use value instead" +) +_DEPRECATED_CLASS_MESSAGE = ( + "{name} is deprecated and will be removed in Python {remove}; " + "use ast.Constant instead" +) + + # If the ast module is loaded more than once, only add deprecated methods once if not hasattr(Constant, 'n'): # The following code is for backward compatibility. # It will be removed in future. - def _getter(self): + def _n_getter(self): + """Deprecated. Use value instead.""" + import warnings + warnings._deprecated( + "Attribute n", message=_DEPRECATED_VALUE_ALIAS_MESSAGE, remove=(3, 14) + ) + return self.value + + def _n_setter(self, value): + import warnings + warnings._deprecated( + "Attribute n", message=_DEPRECATED_VALUE_ALIAS_MESSAGE, remove=(3, 14) + ) + self.value = value + + def _s_getter(self): """Deprecated. 
Use value instead.""" + import warnings + warnings._deprecated( + "Attribute s", message=_DEPRECATED_VALUE_ALIAS_MESSAGE, remove=(3, 14) + ) return self.value - def _setter(self, value): + def _s_setter(self, value): + import warnings + warnings._deprecated( + "Attribute s", message=_DEPRECATED_VALUE_ALIAS_MESSAGE, remove=(3, 14) + ) self.value = value - Constant.n = property(_getter, _setter) - Constant.s = property(_getter, _setter) + Constant.n = property(_n_getter, _n_setter) + Constant.s = property(_s_getter, _s_setter) class _ABC(type): @@ -520,6 +550,13 @@ def __init__(cls, *args): cls.__doc__ = """Deprecated AST node class. Use ast.Constant instead""" def __instancecheck__(cls, inst): + if cls in _const_types: + import warnings + warnings._deprecated( + f"ast.{cls.__qualname__}", + message=_DEPRECATED_CLASS_MESSAGE, + remove=(3, 14) + ) if not isinstance(inst, Constant): return False if cls in _const_types: @@ -543,6 +580,10 @@ def _new(cls, *args, **kwargs): if pos < len(args): raise TypeError(f"{cls.__name__} got multiple values for argument {key!r}") if cls in _const_types: + import warnings + warnings._deprecated( + f"ast.{cls.__qualname__}", message=_DEPRECATED_CLASS_MESSAGE, remove=(3, 14) + ) return Constant(*args, **kwargs) return Constant.__new__(cls, *args, **kwargs) @@ -565,10 +606,19 @@ class Ellipsis(Constant, metaclass=_ABC): _fields = () def __new__(cls, *args, **kwargs): - if cls is Ellipsis: + if cls is _ast_Ellipsis: + import warnings + warnings._deprecated( + "ast.Ellipsis", message=_DEPRECATED_CLASS_MESSAGE, remove=(3, 14) + ) return Constant(..., *args, **kwargs) return Constant.__new__(cls, *args, **kwargs) +# Keep another reference to Ellipsis in the global namespace +# so it can be referenced in Ellipsis.__new__ +# (The original "Ellipsis" name is removed from the global namespace later on) +_ast_Ellipsis = Ellipsis + _const_types = { Num: (int, float, complex), Str: (str,), @@ -1699,6 +1749,22 @@ def unparse(ast_obj): return unparser.visit(ast_obj) +_deprecated_globals = { + name: globals().pop(name) + for name in ('Num', 'Str', 'Bytes', 'NameConstant', 'Ellipsis') +} + +def __getattr__(name): + if name in _deprecated_globals: + globals()[name] = value = _deprecated_globals[name] + import warnings + warnings._deprecated( + f"ast.{name}", message=_DEPRECATED_CLASS_MESSAGE, remove=(3, 14) + ) + return value + raise AttributeError(f"module 'ast' has no attribute '{name}'") + + def main(): import argparse diff --git a/Lib/asyncio/base_tasks.py b/Lib/asyncio/base_tasks.py index 26298e638cbf0d..c907b683413732 100644 --- a/Lib/asyncio/base_tasks.py +++ b/Lib/asyncio/base_tasks.py @@ -15,11 +15,13 @@ def _task_repr_info(task): info.insert(1, 'name=%r' % task.get_name()) - coro = coroutines._format_coroutine(task._coro) - info.insert(2, f'coro=<{coro}>') - if task._fut_waiter is not None: - info.insert(3, f'wait_for={task._fut_waiter!r}') + info.insert(2, f'wait_for={task._fut_waiter!r}') + + if task._coro: + coro = coroutines._format_coroutine(task._coro) + info.insert(2, f'coro=<{coro}>') + return info diff --git a/Lib/asyncio/taskgroups.py b/Lib/asyncio/taskgroups.py index 0fdea3697ece3d..06b2e0db86a1fe 100644 --- a/Lib/asyncio/taskgroups.py +++ b/Lib/asyncio/taskgroups.py @@ -164,8 +164,14 @@ def create_task(self, coro, *, name=None, context=None): else: task = self._loop.create_task(coro, context=context) tasks._set_task_name(task, name) - task.add_done_callback(self._on_task_done) - self._tasks.add(task) + # optimization: Immediately call the done callback if 
the task is + # already done (e.g. if the coro was able to complete eagerly), + # and skip scheduling a done callback + if task.done(): + self._on_task_done(task) + else: + self._tasks.add(task) + task.add_done_callback(self._on_task_done) return task # Since Python 3.8 Tasks propagate all exceptions correctly, diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index c90d32c97add78..8d5bde09ea9b5b 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -6,6 +6,7 @@ 'wait', 'wait_for', 'as_completed', 'sleep', 'gather', 'shield', 'ensure_future', 'run_coroutine_threadsafe', 'current_task', 'all_tasks', + 'create_eager_task_factory', 'eager_task_factory', '_register_task', '_unregister_task', '_enter_task', '_leave_task', ) @@ -43,22 +44,26 @@ def all_tasks(loop=None): """Return a set of all tasks for the loop.""" if loop is None: loop = events.get_running_loop() - # Looping over a WeakSet (_all_tasks) isn't safe as it can be updated from another - # thread while we do so. Therefore we cast it to list prior to filtering. The list - # cast itself requires iteration, so we repeat it several times ignoring - # RuntimeErrors (which are not very likely to occur). See issues 34970 and 36607 for - # details. + # capturing the set of eager tasks first, so if an eager task "graduates" + # to a regular task in another thread, we don't risk missing it. + eager_tasks = list(_eager_tasks) + # Looping over the WeakSet isn't safe as it can be updated from another + # thread, therefore we cast it to list prior to filtering. The list cast + # itself requires iteration, so we repeat it several times ignoring + # RuntimeErrors (which are not very likely to occur). + # See issues 34970 and 36607 for details. + scheduled_tasks = None i = 0 while True: try: - tasks = list(_all_tasks) + scheduled_tasks = list(_scheduled_tasks) except RuntimeError: i += 1 if i >= 1000: raise else: break - return {t for t in tasks + return {t for t in itertools.chain(scheduled_tasks, eager_tasks) if futures._get_loop(t) is loop and not t.done()} @@ -93,7 +98,8 @@ class Task(futures._PyFuture): # Inherit Python Task implementation # status is still pending _log_destroy_pending = True - def __init__(self, coro, *, loop=None, name=None, context=None): + def __init__(self, coro, *, loop=None, name=None, context=None, + eager_start=False): super().__init__(loop=loop) if self._source_traceback: del self._source_traceback[-1] @@ -117,8 +123,11 @@ def __init__(self, coro, *, loop=None, name=None, context=None): else: self._context = context - self._loop.call_soon(self.__step, context=self._context) - _register_task(self) + if eager_start and self._loop.is_running(): + self.__eager_start() + else: + self._loop.call_soon(self.__step, context=self._context) + _register_task(self) def __del__(self): if self._state == futures._PENDING and self._log_destroy_pending: @@ -250,6 +259,25 @@ def uncancel(self): self._num_cancels_requested -= 1 return self._num_cancels_requested + def __eager_start(self): + prev_task = _swap_current_task(self._loop, self) + try: + _register_eager_task(self) + try: + self._context.run(self.__step_run_and_handle_result, None) + finally: + _unregister_eager_task(self) + finally: + try: + curtask = _swap_current_task(self._loop, prev_task) + assert curtask is self + finally: + if self.done(): + self._coro = None + self = None # Needed to break cycles when an exception occurs. 
+ else: + _register_task(self) + def __step(self, exc=None): if self.done(): raise exceptions.InvalidStateError( @@ -258,11 +286,17 @@ def __step(self, exc=None): if not isinstance(exc, exceptions.CancelledError): exc = self._make_cancelled_error() self._must_cancel = False - coro = self._coro self._fut_waiter = None _enter_task(self._loop, self) - # Call either coro.throw(exc) or coro.send(None). + try: + self.__step_run_and_handle_result(exc) + finally: + _leave_task(self._loop, self) + self = None # Needed to break cycles when an exception occurs. + + def __step_run_and_handle_result(self, exc): + coro = self._coro try: if exc is None: # We use the `send` method directly, because coroutines @@ -334,7 +368,6 @@ def __step(self, exc=None): self._loop.call_soon( self.__step, new_exc, context=self._context) finally: - _leave_task(self._loop, self) self = None # Needed to break cycles when an exception occurs. def __wakeup(self, future): @@ -780,6 +813,7 @@ def _done_callback(fut): children = [] nfuts = 0 nfinished = 0 + done_futs = [] loop = None outer = None # bpo-46672 for arg in coros_or_futures: @@ -796,7 +830,10 @@ def _done_callback(fut): nfuts += 1 arg_to_fut[arg] = fut - fut.add_done_callback(_done_callback) + if fut.done(): + done_futs.append(fut) + else: + fut.add_done_callback(_done_callback) else: # There's a duplicate Future object in coros_or_futures. @@ -805,6 +842,13 @@ def _done_callback(fut): children.append(fut) outer = _GatheringFuture(children, loop=loop) + # Run done callbacks after GatheringFuture created so any post-processing + # can be performed at this point + # optimization: in the special case that *all* futures finished eagerly, + # this will effectively complete the gather eagerly, with the last + # callback setting the result (or exception) on outer before returning it + for fut in done_futs: + _done_callback(fut) return outer @@ -897,8 +941,40 @@ def callback(): return future -# WeakSet containing all alive tasks. -_all_tasks = weakref.WeakSet() +def create_eager_task_factory(custom_task_constructor): + """Create a function suitable for use as a task factory on an event-loop. + + Example usage: + + loop.set_task_factory( + asyncio.create_eager_task_factory(my_task_constructor)) + + Now, tasks created will be started immediately (rather than being first + scheduled to an event loop). The constructor argument can be any callable + that returns a Task-compatible object and has a signature compatible + with `Task.__init__`; it must have the `eager_start` keyword argument. + + Most applications will use `Task` for `custom_task_constructor` and in + this case there's no need to call `create_eager_task_factory()` + directly. Instead the global `eager_task_factory` instance can be + used. E.g. `loop.set_task_factory(asyncio.eager_task_factory)`. + """ + + def factory(loop, coro, *, name=None, context=None): + return custom_task_constructor( + coro, loop=loop, name=name, context=context, eager_start=True) + + return factory + + +eager_task_factory = create_eager_task_factory(Task) + + +# Collectively these two sets hold references to the complete set of active +# tasks. Eagerly executed tasks use a faster regular set as an optimization +# but may graduate to a WeakSet if the task blocks on IO. +_scheduled_tasks = weakref.WeakSet() +_eager_tasks = set() # Dictionary containing tasks that are currently active in # all running event loops. 
{EventLoop: Task} @@ -906,8 +982,13 @@ def callback(): def _register_task(task): - """Register a new task in asyncio as executed by loop.""" - _all_tasks.add(task) + """Register an asyncio Task scheduled to run on an event loop.""" + _scheduled_tasks.add(task) + + +def _register_eager_task(task): + """Register an asyncio Task about to be eagerly executed.""" + _eager_tasks.add(task) def _enter_task(loop, task): @@ -926,28 +1007,49 @@ def _leave_task(loop, task): del _current_tasks[loop] +def _swap_current_task(loop, task): + prev_task = _current_tasks.get(loop) + if task is None: + del _current_tasks[loop] + else: + _current_tasks[loop] = task + return prev_task + + def _unregister_task(task): - """Unregister a task.""" - _all_tasks.discard(task) + """Unregister a completed, scheduled Task.""" + _scheduled_tasks.discard(task) + + +def _unregister_eager_task(task): + """Unregister a task which finished its first eager step.""" + _eager_tasks.discard(task) _py_current_task = current_task _py_register_task = _register_task +_py_register_eager_task = _register_eager_task _py_unregister_task = _unregister_task +_py_unregister_eager_task = _unregister_eager_task _py_enter_task = _enter_task _py_leave_task = _leave_task +_py_swap_current_task = _swap_current_task try: - from _asyncio import (_register_task, _unregister_task, - _enter_task, _leave_task, - _all_tasks, _current_tasks, + from _asyncio import (_register_task, _register_eager_task, + _unregister_task, _unregister_eager_task, + _enter_task, _leave_task, _swap_current_task, + _scheduled_tasks, _eager_tasks, _current_tasks, current_task) except ImportError: pass else: _c_current_task = current_task _c_register_task = _register_task + _c_register_eager_task = _register_eager_task _c_unregister_task = _unregister_task + _c_unregister_eager_task = _unregister_eager_task _c_enter_task = _enter_task _c_leave_task = _leave_task + _c_swap_current_task = _swap_current_task diff --git a/Lib/calendar.py b/Lib/calendar.py index bbd4fea3b88ca4..ea56f12ccc41d0 100644 --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -83,7 +83,6 @@ class Day(IntEnum): SUNDAY = 6 - # Number of days per month (except for February in leap years) mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] @@ -156,7 +155,7 @@ def weekday(year, month, day): """Return weekday (0-6 ~ Mon-Sun) for year, month (1-12), day (1-31).""" if not datetime.MINYEAR <= year <= datetime.MAXYEAR: year = 2000 + year % 400 - return datetime.date(year, month, day).weekday() + return Day(datetime.date(year, month, day).weekday()) def monthrange(year, month): diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index a73cdc22a5f4b3..b0b8a773b7594f 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -1227,8 +1227,10 @@ def _add_slots(cls, is_frozen, weakref_slot): if is_frozen: # Need this for pickling frozen classes with slots. - cls.__getstate__ = _dataclass_getstate - cls.__setstate__ = _dataclass_setstate + if '__getstate__' not in cls_dict: + cls.__getstate__ = _dataclass_getstate + if '__setstate__' not in cls_dict: + cls.__setstate__ = _dataclass_setstate return cls diff --git a/Lib/datetime.py b/Lib/datetime.py index b0eb1c216a689d..bad8beb4f6b026 100644 --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1,2663 +1,9 @@ -"""Concrete date/time and related types. - -See http://www.iana.org/time-zones/repository/tz-link.html for -time zone and DST data sources. 
-""" - -__all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", - "MINYEAR", "MAXYEAR", "UTC") - - -import time as _time -import math as _math -import sys -from operator import index as _index - -def _cmp(x, y): - return 0 if x == y else 1 if x > y else -1 - -MINYEAR = 1 -MAXYEAR = 9999 -_MAXORDINAL = 3652059 # date.max.toordinal() - -# Utility functions, adapted from Python's Demo/classes/Dates.py, which -# also assumes the current Gregorian calendar indefinitely extended in -# both directions. Difference: Dates.py calls January 1 of year 0 day -# number 1. The code here calls January 1 of year 1 day number 1. This is -# to match the definition of the "proleptic Gregorian" calendar in Dershowitz -# and Reingold's "Calendrical Calculations", where it's the base calendar -# for all computations. See the book for algorithms for converting between -# proleptic Gregorian ordinals and many other calendar systems. - -# -1 is a placeholder for indexing purposes. -_DAYS_IN_MONTH = [-1, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] - -_DAYS_BEFORE_MONTH = [-1] # -1 is a placeholder for indexing purposes. -dbm = 0 -for dim in _DAYS_IN_MONTH[1:]: - _DAYS_BEFORE_MONTH.append(dbm) - dbm += dim -del dbm, dim - -def _is_leap(year): - "year -> 1 if leap year, else 0." - return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) - -def _days_before_year(year): - "year -> number of days before January 1st of year." - y = year - 1 - return y*365 + y//4 - y//100 + y//400 - -def _days_in_month(year, month): - "year, month -> number of days in that month in that year." - assert 1 <= month <= 12, month - if month == 2 and _is_leap(year): - return 29 - return _DAYS_IN_MONTH[month] - -def _days_before_month(year, month): - "year, month -> number of days in year preceding first day of month." - assert 1 <= month <= 12, 'month must be in 1..12' - return _DAYS_BEFORE_MONTH[month] + (month > 2 and _is_leap(year)) - -def _ymd2ord(year, month, day): - "year, month, day -> ordinal, considering 01-Jan-0001 as day 1." - assert 1 <= month <= 12, 'month must be in 1..12' - dim = _days_in_month(year, month) - assert 1 <= day <= dim, ('day must be in 1..%d' % dim) - return (_days_before_year(year) + - _days_before_month(year, month) + - day) - -_DI400Y = _days_before_year(401) # number of days in 400 years -_DI100Y = _days_before_year(101) # " " " " 100 " -_DI4Y = _days_before_year(5) # " " " " 4 " - -# A 4-year cycle has an extra leap day over what we'd get from pasting -# together 4 single years. -assert _DI4Y == 4 * 365 + 1 - -# Similarly, a 400-year cycle has an extra leap day over what we'd get from -# pasting together 4 100-year cycles. -assert _DI400Y == 4 * _DI100Y + 1 - -# OTOH, a 100-year cycle has one fewer leap day than we'd get from -# pasting together 25 4-year cycles. -assert _DI100Y == 25 * _DI4Y - 1 - -def _ord2ymd(n): - "ordinal -> (year, month, day), considering 01-Jan-0001 as day 1." - - # n is a 1-based index, starting at 1-Jan-1. The pattern of leap years - # repeats exactly every 400 years. The basic strategy is to find the - # closest 400-year boundary at or before n, then work with the offset - # from that boundary to n. Life is much clearer if we subtract 1 from - # n first -- then the values of n at 400-year boundaries are exactly - # those divisible by _DI400Y: - # - # D M Y n n-1 - # -- --- ---- ---------- ---------------- - # 31 Dec -400 -_DI400Y -_DI400Y -1 - # 1 Jan -399 -_DI400Y +1 -_DI400Y 400-year boundary - # ... 
- # 30 Dec 000 -1 -2 - # 31 Dec 000 0 -1 - # 1 Jan 001 1 0 400-year boundary - # 2 Jan 001 2 1 - # 3 Jan 001 3 2 - # ... - # 31 Dec 400 _DI400Y _DI400Y -1 - # 1 Jan 401 _DI400Y +1 _DI400Y 400-year boundary - n -= 1 - n400, n = divmod(n, _DI400Y) - year = n400 * 400 + 1 # ..., -399, 1, 401, ... - - # Now n is the (non-negative) offset, in days, from January 1 of year, to - # the desired date. Now compute how many 100-year cycles precede n. - # Note that it's possible for n100 to equal 4! In that case 4 full - # 100-year cycles precede the desired day, which implies the desired - # day is December 31 at the end of a 400-year cycle. - n100, n = divmod(n, _DI100Y) - - # Now compute how many 4-year cycles precede it. - n4, n = divmod(n, _DI4Y) - - # And now how many single years. Again n1 can be 4, and again meaning - # that the desired day is December 31 at the end of the 4-year cycle. - n1, n = divmod(n, 365) - - year += n100 * 100 + n4 * 4 + n1 - if n1 == 4 or n100 == 4: - assert n == 0 - return year-1, 12, 31 - - # Now the year is correct, and n is the offset from January 1. We find - # the month via an estimate that's either exact or one too large. - leapyear = n1 == 3 and (n4 != 24 or n100 == 3) - assert leapyear == _is_leap(year) - month = (n + 50) >> 5 - preceding = _DAYS_BEFORE_MONTH[month] + (month > 2 and leapyear) - if preceding > n: # estimate is too large - month -= 1 - preceding -= _DAYS_IN_MONTH[month] + (month == 2 and leapyear) - n -= preceding - assert 0 <= n < _days_in_month(year, month) - - # Now the year and month are correct, and n is the offset from the - # start of that month: we're done! - return year, month, n+1 - -# Month and day names. For localized versions, see the calendar module. -_MONTHNAMES = [None, "Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] -_DAYNAMES = [None, "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] - - -def _build_struct_time(y, m, d, hh, mm, ss, dstflag): - wday = (_ymd2ord(y, m, d) + 6) % 7 - dnum = _days_before_month(y, m) + d - return _time.struct_time((y, m, d, hh, mm, ss, wday, dnum, dstflag)) - -def _format_time(hh, mm, ss, us, timespec='auto'): - specs = { - 'hours': '{:02d}', - 'minutes': '{:02d}:{:02d}', - 'seconds': '{:02d}:{:02d}:{:02d}', - 'milliseconds': '{:02d}:{:02d}:{:02d}.{:03d}', - 'microseconds': '{:02d}:{:02d}:{:02d}.{:06d}' - } - - if timespec == 'auto': - # Skip trailing microseconds when us==0. - timespec = 'microseconds' if us else 'seconds' - elif timespec == 'milliseconds': - us //= 1000 - try: - fmt = specs[timespec] - except KeyError: - raise ValueError('Unknown timespec value') - else: - return fmt.format(hh, mm, ss, us) - -def _format_offset(off, sep=':'): - s = '' - if off is not None: - if off.days < 0: - sign = "-" - off = -off - else: - sign = "+" - hh, mm = divmod(off, timedelta(hours=1)) - mm, ss = divmod(mm, timedelta(minutes=1)) - s += "%s%02d%s%02d" % (sign, hh, sep, mm) - if ss or ss.microseconds: - s += "%s%02d" % (sep, ss.seconds) - - if ss.microseconds: - s += '.%06d' % ss.microseconds - return s - -# Correctly substitute for %z and %Z escapes in strftime formats. -def _wrap_strftime(object, format, timetuple): - # Don't call utcoffset() or tzname() unless actually needed. - freplace = None # the string to use for %f - zreplace = None # the string to use for %z - colonzreplace = None # the string to use for %:z - Zreplace = None # the string to use for %Z - - # Scan format for %z, %:z and %Z escapes, replacing as needed. 
- newformat = [] - push = newformat.append - i, n = 0, len(format) - while i < n: - ch = format[i] - i += 1 - if ch == '%': - if i < n: - ch = format[i] - i += 1 - if ch == 'f': - if freplace is None: - freplace = '%06d' % getattr(object, - 'microsecond', 0) - newformat.append(freplace) - elif ch == 'z': - if zreplace is None: - if hasattr(object, "utcoffset"): - zreplace = _format_offset(object.utcoffset(), sep="") - else: - zreplace = "" - assert '%' not in zreplace - newformat.append(zreplace) - elif ch == ':': - if i < n: - ch2 = format[i] - i += 1 - if ch2 == 'z': - if colonzreplace is None: - if hasattr(object, "utcoffset"): - colonzreplace = _format_offset(object.utcoffset(), sep=":") - else: - colonzreplace = "" - assert '%' not in colonzreplace - newformat.append(colonzreplace) - else: - push('%') - push(ch) - push(ch2) - elif ch == 'Z': - if Zreplace is None: - Zreplace = "" - if hasattr(object, "tzname"): - s = object.tzname() - if s is not None: - # strftime is going to have at this: escape % - Zreplace = s.replace('%', '%%') - newformat.append(Zreplace) - else: - push('%') - push(ch) - else: - push('%') - else: - push(ch) - newformat = "".join(newformat) - return _time.strftime(newformat, timetuple) - -# Helpers for parsing the result of isoformat() -def _is_ascii_digit(c): - return c in "0123456789" - -def _find_isoformat_datetime_separator(dtstr): - # See the comment in _datetimemodule.c:_find_isoformat_datetime_separator - len_dtstr = len(dtstr) - if len_dtstr == 7: - return 7 - - assert len_dtstr > 7 - date_separator = "-" - week_indicator = "W" - - if dtstr[4] == date_separator: - if dtstr[5] == week_indicator: - if len_dtstr < 8: - raise ValueError("Invalid ISO string") - if len_dtstr > 8 and dtstr[8] == date_separator: - if len_dtstr == 9: - raise ValueError("Invalid ISO string") - if len_dtstr > 10 and _is_ascii_digit(dtstr[10]): - # This is as far as we need to resolve the ambiguity for - # the moment - if we have YYYY-Www-##, the separator is - # either a hyphen at 8 or a number at 10. - # - # We'll assume it's a hyphen at 8 because it's way more - # likely that someone will use a hyphen as a separator than - # a number, but at this point it's really best effort - # because this is an extension of the spec anyway. - # TODO(pganssle): Document this - return 8 - return 10 - else: - # YYYY-Www (8) - return 8 - else: - # YYYY-MM-DD (10) - return 10 - else: - if dtstr[4] == week_indicator: - # YYYYWww (7) or YYYYWwwd (8) - idx = 7 - while idx < len_dtstr: - if not _is_ascii_digit(dtstr[idx]): - break - idx += 1 - - if idx < 9: - return idx - - if idx % 2 == 0: - # If the index of the last number is even, it's YYYYWwwd - return 7 - else: - return 8 - else: - # YYYYMMDD (8) - return 8 - - -def _parse_isoformat_date(dtstr): - # It is assumed that this is an ASCII-only string of lengths 7, 8 or 10, - # see the comment on Modules/_datetimemodule.c:_find_isoformat_datetime_separator - assert len(dtstr) in (7, 8, 10) - year = int(dtstr[0:4]) - has_sep = dtstr[4] == '-' - - pos = 4 + has_sep - if dtstr[pos:pos + 1] == "W": - # YYYY-?Www-?D? 
- pos += 1 - weekno = int(dtstr[pos:pos + 2]) - pos += 2 - - dayno = 1 - if len(dtstr) > pos: - if (dtstr[pos:pos + 1] == '-') != has_sep: - raise ValueError("Inconsistent use of dash separator") - - pos += has_sep - - dayno = int(dtstr[pos:pos + 1]) - - return list(_isoweek_to_gregorian(year, weekno, dayno)) - else: - month = int(dtstr[pos:pos + 2]) - pos += 2 - if (dtstr[pos:pos + 1] == "-") != has_sep: - raise ValueError("Inconsistent use of dash separator") - - pos += has_sep - day = int(dtstr[pos:pos + 2]) - - return [year, month, day] - - -_FRACTION_CORRECTION = [100000, 10000, 1000, 100, 10] - - -def _parse_hh_mm_ss_ff(tstr): - # Parses things of the form HH[:?MM[:?SS[{.,}fff[fff]]]] - len_str = len(tstr) - - time_comps = [0, 0, 0, 0] - pos = 0 - for comp in range(0, 3): - if (len_str - pos) < 2: - raise ValueError("Incomplete time component") - - time_comps[comp] = int(tstr[pos:pos+2]) - - pos += 2 - next_char = tstr[pos:pos+1] - - if comp == 0: - has_sep = next_char == ':' - - if not next_char or comp >= 2: - break - - if has_sep and next_char != ':': - raise ValueError("Invalid time separator: %c" % next_char) - - pos += has_sep - - if pos < len_str: - if tstr[pos] not in '.,': - raise ValueError("Invalid microsecond component") - else: - pos += 1 - - len_remainder = len_str - pos - - if len_remainder >= 6: - to_parse = 6 - else: - to_parse = len_remainder - - time_comps[3] = int(tstr[pos:(pos+to_parse)]) - if to_parse < 6: - time_comps[3] *= _FRACTION_CORRECTION[to_parse-1] - if (len_remainder > to_parse - and not all(map(_is_ascii_digit, tstr[(pos+to_parse):]))): - raise ValueError("Non-digit values in unparsed fraction") - - return time_comps - -def _parse_isoformat_time(tstr): - # Format supported is HH[:MM[:SS[.fff[fff]]]][+HH:MM[:SS[.ffffff]]] - len_str = len(tstr) - if len_str < 2: - raise ValueError("Isoformat time too short") - - # This is equivalent to re.search('[+-Z]', tstr), but faster - tz_pos = (tstr.find('-') + 1 or tstr.find('+') + 1 or tstr.find('Z') + 1) - timestr = tstr[:tz_pos-1] if tz_pos > 0 else tstr - - time_comps = _parse_hh_mm_ss_ff(timestr) - - tzi = None - if tz_pos == len_str and tstr[-1] == 'Z': - tzi = timezone.utc - elif tz_pos > 0: - tzstr = tstr[tz_pos:] - - # Valid time zone strings are: - # HH len: 2 - # HHMM len: 4 - # HH:MM len: 5 - # HHMMSS len: 6 - # HHMMSS.f+ len: 7+ - # HH:MM:SS len: 8 - # HH:MM:SS.f+ len: 10+ - - if len(tzstr) in (0, 1, 3): - raise ValueError("Malformed time zone string") - - tz_comps = _parse_hh_mm_ss_ff(tzstr) - - if all(x == 0 for x in tz_comps): - tzi = timezone.utc - else: - tzsign = -1 if tstr[tz_pos - 1] == '-' else 1 - - td = timedelta(hours=tz_comps[0], minutes=tz_comps[1], - seconds=tz_comps[2], microseconds=tz_comps[3]) - - tzi = timezone(tzsign * td) - - time_comps.append(tzi) - - return time_comps - -# tuple[int, int, int] -> tuple[int, int, int] version of date.fromisocalendar -def _isoweek_to_gregorian(year, week, day): - # Year is bounded this way because 9999-12-31 is (9999, 52, 5) - if not MINYEAR <= year <= MAXYEAR: - raise ValueError(f"Year is out of range: {year}") - - if not 0 < week < 53: - out_of_range = True - - if week == 53: - # ISO years have 53 weeks in them on years starting with a - # Thursday and leap years starting on a Wednesday - first_weekday = _ymd2ord(year, 1, 1) % 7 - if (first_weekday == 4 or (first_weekday == 3 and - _is_leap(year))): - out_of_range = False - - if out_of_range: - raise ValueError(f"Invalid week: {week}") - - if not 0 < day < 8: - raise ValueError(f"Invalid 
weekday: {day} (range is [1, 7])") - - # Now compute the offset from (Y, 1, 1) in days: - day_offset = (week - 1) * 7 + (day - 1) - - # Calculate the ordinal day for monday, week 1 - day_1 = _isoweek1monday(year) - ord_day = day_1 + day_offset - - return _ord2ymd(ord_day) - - -# Just raise TypeError if the arg isn't None or a string. -def _check_tzname(name): - if name is not None and not isinstance(name, str): - raise TypeError("tzinfo.tzname() must return None or string, " - "not '%s'" % type(name)) - -# name is the offset-producing method, "utcoffset" or "dst". -# offset is what it returned. -# If offset isn't None or timedelta, raises TypeError. -# If offset is None, returns None. -# Else offset is checked for being in range. -# If it is, its integer value is returned. Else ValueError is raised. -def _check_utc_offset(name, offset): - assert name in ("utcoffset", "dst") - if offset is None: - return - if not isinstance(offset, timedelta): - raise TypeError("tzinfo.%s() must return None " - "or timedelta, not '%s'" % (name, type(offset))) - if not -timedelta(1) < offset < timedelta(1): - raise ValueError("%s()=%s, must be strictly between " - "-timedelta(hours=24) and timedelta(hours=24)" % - (name, offset)) - -def _check_date_fields(year, month, day): - year = _index(year) - month = _index(month) - day = _index(day) - if not MINYEAR <= year <= MAXYEAR: - raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year) - if not 1 <= month <= 12: - raise ValueError('month must be in 1..12', month) - dim = _days_in_month(year, month) - if not 1 <= day <= dim: - raise ValueError('day must be in 1..%d' % dim, day) - return year, month, day - -def _check_time_fields(hour, minute, second, microsecond, fold): - hour = _index(hour) - minute = _index(minute) - second = _index(second) - microsecond = _index(microsecond) - if not 0 <= hour <= 23: - raise ValueError('hour must be in 0..23', hour) - if not 0 <= minute <= 59: - raise ValueError('minute must be in 0..59', minute) - if not 0 <= second <= 59: - raise ValueError('second must be in 0..59', second) - if not 0 <= microsecond <= 999999: - raise ValueError('microsecond must be in 0..999999', microsecond) - if fold not in (0, 1): - raise ValueError('fold must be either 0 or 1', fold) - return hour, minute, second, microsecond, fold - -def _check_tzinfo_arg(tz): - if tz is not None and not isinstance(tz, tzinfo): - raise TypeError("tzinfo argument must be None or of a tzinfo subclass") - -def _cmperror(x, y): - raise TypeError("can't compare '%s' to '%s'" % ( - type(x).__name__, type(y).__name__)) - -def _divide_and_round(a, b): - """divide a by b and round result to the nearest integer - - When the ratio is exactly half-way between two integers, - the even integer is returned. - """ - # Based on the reference implementation for divmod_near - # in Objects/longobject.c. - q, r = divmod(a, b) - # round up if either r / b > 0.5, or r / b == 0.5 and q is odd. - # The expression r / b > 0.5 is equivalent to 2 * r > b if b is - # positive, 2 * r < b if b negative. - r *= 2 - greater_than_half = r > b if b > 0 else r < b - if greater_than_half or r == b and q % 2 == 1: - q += 1 - - return q - - -class timedelta: - """Represent the difference between two datetime objects. 
- - Supported operators: - - - add, subtract timedelta - - unary plus, minus, abs - - compare to timedelta - - multiply, divide by int - - In addition, datetime supports subtraction of two datetime objects - returning a timedelta, and addition or subtraction of a datetime - and a timedelta giving a datetime. - - Representation: (days, seconds, microseconds). - """ - # The representation of (days, seconds, microseconds) was chosen - # arbitrarily; the exact rationale originally specified in the docstring - # was "Because I felt like it." - - __slots__ = '_days', '_seconds', '_microseconds', '_hashcode' - - def __new__(cls, days=0, seconds=0, microseconds=0, - milliseconds=0, minutes=0, hours=0, weeks=0): - # Doing this efficiently and accurately in C is going to be difficult - # and error-prone, due to ubiquitous overflow possibilities, and that - # C double doesn't have enough bits of precision to represent - # microseconds over 10K years faithfully. The code here tries to make - # explicit where go-fast assumptions can be relied on, in order to - # guide the C implementation; it's way more convoluted than speed- - # ignoring auto-overflow-to-long idiomatic Python could be. - - # XXX Check that all inputs are ints or floats. - - # Final values, all integer. - # s and us fit in 32-bit signed ints; d isn't bounded. - d = s = us = 0 - - # Normalize everything to days, seconds, microseconds. - days += weeks*7 - seconds += minutes*60 + hours*3600 - microseconds += milliseconds*1000 - - # Get rid of all fractions, and normalize s and us. - # Take a deep breath . - if isinstance(days, float): - dayfrac, days = _math.modf(days) - daysecondsfrac, daysecondswhole = _math.modf(dayfrac * (24.*3600.)) - assert daysecondswhole == int(daysecondswhole) # can't overflow - s = int(daysecondswhole) - assert days == int(days) - d = int(days) - else: - daysecondsfrac = 0.0 - d = days - assert isinstance(daysecondsfrac, float) - assert abs(daysecondsfrac) <= 1.0 - assert isinstance(d, int) - assert abs(s) <= 24 * 3600 - # days isn't referenced again before redefinition - - if isinstance(seconds, float): - secondsfrac, seconds = _math.modf(seconds) - assert seconds == int(seconds) - seconds = int(seconds) - secondsfrac += daysecondsfrac - assert abs(secondsfrac) <= 2.0 - else: - secondsfrac = daysecondsfrac - # daysecondsfrac isn't referenced again - assert isinstance(secondsfrac, float) - assert abs(secondsfrac) <= 2.0 - - assert isinstance(seconds, int) - days, seconds = divmod(seconds, 24*3600) - d += days - s += int(seconds) # can't overflow - assert isinstance(s, int) - assert abs(s) <= 2 * 24 * 3600 - # seconds isn't referenced again before redefinition - - usdouble = secondsfrac * 1e6 - assert abs(usdouble) < 2.1e6 # exact value not critical - # secondsfrac isn't referenced again - - if isinstance(microseconds, float): - microseconds = round(microseconds + usdouble) - seconds, microseconds = divmod(microseconds, 1000000) - days, seconds = divmod(seconds, 24*3600) - d += days - s += seconds - else: - microseconds = int(microseconds) - seconds, microseconds = divmod(microseconds, 1000000) - days, seconds = divmod(seconds, 24*3600) - d += days - s += seconds - microseconds = round(microseconds + usdouble) - assert isinstance(s, int) - assert isinstance(microseconds, int) - assert abs(s) <= 3 * 24 * 3600 - assert abs(microseconds) < 3.1e6 - - # Just a little bit of carrying possible for microseconds and seconds. 
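-        # Illustrative example (added; not part of the original comments): after
-        # the final carries below, timedelta(seconds=90061, microseconds=1500000)
-        # is stored as days=1, seconds=3662, microseconds=500000, i.e.
-        # str() == '1 day, 1:01:02.500000'.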
- seconds, us = divmod(microseconds, 1000000) - s += seconds - days, s = divmod(s, 24*3600) - d += days - - assert isinstance(d, int) - assert isinstance(s, int) and 0 <= s < 24*3600 - assert isinstance(us, int) and 0 <= us < 1000000 - - if abs(d) > 999999999: - raise OverflowError("timedelta # of days is too large: %d" % d) - - self = object.__new__(cls) - self._days = d - self._seconds = s - self._microseconds = us - self._hashcode = -1 - return self - - def __repr__(self): - args = [] - if self._days: - args.append("days=%d" % self._days) - if self._seconds: - args.append("seconds=%d" % self._seconds) - if self._microseconds: - args.append("microseconds=%d" % self._microseconds) - if not args: - args.append('0') - return "%s.%s(%s)" % (self.__class__.__module__, - self.__class__.__qualname__, - ', '.join(args)) - - def __str__(self): - mm, ss = divmod(self._seconds, 60) - hh, mm = divmod(mm, 60) - s = "%d:%02d:%02d" % (hh, mm, ss) - if self._days: - def plural(n): - return n, abs(n) != 1 and "s" or "" - s = ("%d day%s, " % plural(self._days)) + s - if self._microseconds: - s = s + ".%06d" % self._microseconds - return s - - def total_seconds(self): - """Total seconds in the duration.""" - return ((self.days * 86400 + self.seconds) * 10**6 + - self.microseconds) / 10**6 - - # Read-only field accessors - @property - def days(self): - """days""" - return self._days - - @property - def seconds(self): - """seconds""" - return self._seconds - - @property - def microseconds(self): - """microseconds""" - return self._microseconds - - def __add__(self, other): - if isinstance(other, timedelta): - # for CPython compatibility, we cannot use - # our __class__ here, but need a real timedelta - return timedelta(self._days + other._days, - self._seconds + other._seconds, - self._microseconds + other._microseconds) - return NotImplemented - - __radd__ = __add__ - - def __sub__(self, other): - if isinstance(other, timedelta): - # for CPython compatibility, we cannot use - # our __class__ here, but need a real timedelta - return timedelta(self._days - other._days, - self._seconds - other._seconds, - self._microseconds - other._microseconds) - return NotImplemented - - def __rsub__(self, other): - if isinstance(other, timedelta): - return -self + other - return NotImplemented - - def __neg__(self): - # for CPython compatibility, we cannot use - # our __class__ here, but need a real timedelta - return timedelta(-self._days, - -self._seconds, - -self._microseconds) - - def __pos__(self): - return self - - def __abs__(self): - if self._days < 0: - return -self - else: - return self - - def __mul__(self, other): - if isinstance(other, int): - # for CPython compatibility, we cannot use - # our __class__ here, but need a real timedelta - return timedelta(self._days * other, - self._seconds * other, - self._microseconds * other) - if isinstance(other, float): - usec = self._to_microseconds() - a, b = other.as_integer_ratio() - return timedelta(0, 0, _divide_and_round(usec * a, b)) - return NotImplemented - - __rmul__ = __mul__ - - def _to_microseconds(self): - return ((self._days * (24*3600) + self._seconds) * 1000000 + - self._microseconds) - - def __floordiv__(self, other): - if not isinstance(other, (int, timedelta)): - return NotImplemented - usec = self._to_microseconds() - if isinstance(other, timedelta): - return usec // other._to_microseconds() - if isinstance(other, int): - return timedelta(0, 0, usec // other) - - def __truediv__(self, other): - if not isinstance(other, (int, float, timedelta)): - 
return NotImplemented - usec = self._to_microseconds() - if isinstance(other, timedelta): - return usec / other._to_microseconds() - if isinstance(other, int): - return timedelta(0, 0, _divide_and_round(usec, other)) - if isinstance(other, float): - a, b = other.as_integer_ratio() - return timedelta(0, 0, _divide_and_round(b * usec, a)) - - def __mod__(self, other): - if isinstance(other, timedelta): - r = self._to_microseconds() % other._to_microseconds() - return timedelta(0, 0, r) - return NotImplemented - - def __divmod__(self, other): - if isinstance(other, timedelta): - q, r = divmod(self._to_microseconds(), - other._to_microseconds()) - return q, timedelta(0, 0, r) - return NotImplemented - - # Comparisons of timedelta objects with other. - - def __eq__(self, other): - if isinstance(other, timedelta): - return self._cmp(other) == 0 - else: - return NotImplemented - - def __le__(self, other): - if isinstance(other, timedelta): - return self._cmp(other) <= 0 - else: - return NotImplemented - - def __lt__(self, other): - if isinstance(other, timedelta): - return self._cmp(other) < 0 - else: - return NotImplemented - - def __ge__(self, other): - if isinstance(other, timedelta): - return self._cmp(other) >= 0 - else: - return NotImplemented - - def __gt__(self, other): - if isinstance(other, timedelta): - return self._cmp(other) > 0 - else: - return NotImplemented - - def _cmp(self, other): - assert isinstance(other, timedelta) - return _cmp(self._getstate(), other._getstate()) - - def __hash__(self): - if self._hashcode == -1: - self._hashcode = hash(self._getstate()) - return self._hashcode - - def __bool__(self): - return (self._days != 0 or - self._seconds != 0 or - self._microseconds != 0) - - # Pickle support. - - def _getstate(self): - return (self._days, self._seconds, self._microseconds) - - def __reduce__(self): - return (self.__class__, self._getstate()) - -timedelta.min = timedelta(-999999999) -timedelta.max = timedelta(days=999999999, hours=23, minutes=59, seconds=59, - microseconds=999999) -timedelta.resolution = timedelta(microseconds=1) - -class date: - """Concrete date type. - - Constructors: - - __new__() - fromtimestamp() - today() - fromordinal() - - Operators: - - __repr__, __str__ - __eq__, __le__, __lt__, __ge__, __gt__, __hash__ - __add__, __radd__, __sub__ (add/radd only with timedelta arg) - - Methods: - - timetuple() - toordinal() - weekday() - isoweekday(), isocalendar(), isoformat() - ctime() - strftime() - - Properties (readonly): - year, month, day - """ - __slots__ = '_year', '_month', '_day', '_hashcode' - - def __new__(cls, year, month=None, day=None): - """Constructor. - - Arguments: - - year, month, day (required, base 1) - """ - if (month is None and - isinstance(year, (bytes, str)) and len(year) == 4 and - 1 <= ord(year[2:3]) <= 12): - # Pickle support - if isinstance(year, str): - try: - year = year.encode('latin1') - except UnicodeEncodeError: - # More informative error message. - raise ValueError( - "Failed to encode latin1 string when unpickling " - "a date object. " - "pickle.load(data, encoding='latin1') is assumed.") - self = object.__new__(cls) - self.__setstate(year) - self._hashcode = -1 - return self - year, month, day = _check_date_fields(year, month, day) - self = object.__new__(cls) - self._year = year - self._month = month - self._day = day - self._hashcode = -1 - return self - - # Additional constructors - - @classmethod - def fromtimestamp(cls, t): - "Construct a date from a POSIX timestamp (like time.time())." 
- y, m, d, hh, mm, ss, weekday, jday, dst = _time.localtime(t) - return cls(y, m, d) - - @classmethod - def today(cls): - "Construct a date from time.time()." - t = _time.time() - return cls.fromtimestamp(t) - - @classmethod - def fromordinal(cls, n): - """Construct a date from a proleptic Gregorian ordinal. - - January 1 of year 1 is day 1. Only the year, month and day are - non-zero in the result. - """ - y, m, d = _ord2ymd(n) - return cls(y, m, d) - - @classmethod - def fromisoformat(cls, date_string): - """Construct a date from a string in ISO 8601 format.""" - if not isinstance(date_string, str): - raise TypeError('fromisoformat: argument must be str') - - if len(date_string) not in (7, 8, 10): - raise ValueError(f'Invalid isoformat string: {date_string!r}') - - try: - return cls(*_parse_isoformat_date(date_string)) - except Exception: - raise ValueError(f'Invalid isoformat string: {date_string!r}') - - @classmethod - def fromisocalendar(cls, year, week, day): - """Construct a date from the ISO year, week number and weekday. - - This is the inverse of the date.isocalendar() function""" - return cls(*_isoweek_to_gregorian(year, week, day)) - - # Conversions to string - - def __repr__(self): - """Convert to formal string, for repr(). - - >>> dt = datetime(2010, 1, 1) - >>> repr(dt) - 'datetime.datetime(2010, 1, 1, 0, 0)' - - >>> dt = datetime(2010, 1, 1, tzinfo=timezone.utc) - >>> repr(dt) - 'datetime.datetime(2010, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)' - """ - return "%s.%s(%d, %d, %d)" % (self.__class__.__module__, - self.__class__.__qualname__, - self._year, - self._month, - self._day) - # XXX These shouldn't depend on time.localtime(), because that - # clips the usable dates to [1970 .. 2038). At least ctime() is - # easily done without using strftime() -- that's better too because - # strftime("%c", ...) is locale specific. - - - def ctime(self): - "Return ctime() style string." - weekday = self.toordinal() % 7 or 7 - return "%s %s %2d 00:00:00 %04d" % ( - _DAYNAMES[weekday], - _MONTHNAMES[self._month], - self._day, self._year) - - def strftime(self, format): - """ - Format using strftime(). - - Example: "%d/%m/%Y, %H:%M:%S" - """ - return _wrap_strftime(self, format, self.timetuple()) - - def __format__(self, fmt): - if not isinstance(fmt, str): - raise TypeError("must be str, not %s" % type(fmt).__name__) - if len(fmt) != 0: - return self.strftime(fmt) - return str(self) - - def isoformat(self): - """Return the date formatted according to ISO. - - This is 'YYYY-MM-DD'. - - References: - - http://www.w3.org/TR/NOTE-datetime - - http://www.cl.cam.ac.uk/~mgk25/iso-time.html - """ - return "%04d-%02d-%02d" % (self._year, self._month, self._day) - - __str__ = isoformat - - # Read-only field accessors - @property - def year(self): - """year (1-9999)""" - return self._year - - @property - def month(self): - """month (1-12)""" - return self._month - - @property - def day(self): - """day (1-31)""" - return self._day - - # Standard conversions, __eq__, __le__, __lt__, __ge__, __gt__, - # __hash__ (and helpers) - - def timetuple(self): - "Return local time tuple compatible with time.localtime()." - return _build_struct_time(self._year, self._month, self._day, - 0, 0, 0, -1) - - def toordinal(self): - """Return proleptic Gregorian ordinal for the year, month and day. - - January 1 of year 1 is day 1. Only the year, month and day values - contribute to the result. 
- """ - return _ymd2ord(self._year, self._month, self._day) - - def replace(self, year=None, month=None, day=None): - """Return a new date with new values for the specified fields.""" - if year is None: - year = self._year - if month is None: - month = self._month - if day is None: - day = self._day - return type(self)(year, month, day) - - # Comparisons of date objects with other. - - def __eq__(self, other): - if isinstance(other, date): - return self._cmp(other) == 0 - return NotImplemented - - def __le__(self, other): - if isinstance(other, date): - return self._cmp(other) <= 0 - return NotImplemented - - def __lt__(self, other): - if isinstance(other, date): - return self._cmp(other) < 0 - return NotImplemented - - def __ge__(self, other): - if isinstance(other, date): - return self._cmp(other) >= 0 - return NotImplemented - - def __gt__(self, other): - if isinstance(other, date): - return self._cmp(other) > 0 - return NotImplemented - - def _cmp(self, other): - assert isinstance(other, date) - y, m, d = self._year, self._month, self._day - y2, m2, d2 = other._year, other._month, other._day - return _cmp((y, m, d), (y2, m2, d2)) - - def __hash__(self): - "Hash." - if self._hashcode == -1: - self._hashcode = hash(self._getstate()) - return self._hashcode - - # Computations - - def __add__(self, other): - "Add a date to a timedelta." - if isinstance(other, timedelta): - o = self.toordinal() + other.days - if 0 < o <= _MAXORDINAL: - return type(self).fromordinal(o) - raise OverflowError("result out of range") - return NotImplemented - - __radd__ = __add__ - - def __sub__(self, other): - """Subtract two dates, or a date and a timedelta.""" - if isinstance(other, timedelta): - return self + timedelta(-other.days) - if isinstance(other, date): - days1 = self.toordinal() - days2 = other.toordinal() - return timedelta(days1 - days2) - return NotImplemented - - def weekday(self): - "Return day of the week, where Monday == 0 ... Sunday == 6." - return (self.toordinal() + 6) % 7 - - # Day-of-the-week and week-of-the-year, according to ISO - - def isoweekday(self): - "Return day of the week, where Monday == 1 ... Sunday == 7." - # 1-Jan-0001 is a Monday - return self.toordinal() % 7 or 7 - - def isocalendar(self): - """Return a named tuple containing ISO year, week number, and weekday. - - The first ISO week of the year is the (Mon-Sun) week - containing the year's first Thursday; everything else derives - from that. - - The first week is 1; Monday is 1 ... Sunday is 7. - - ISO calendar algorithm taken from - http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm - (used with permission) - """ - year = self._year - week1monday = _isoweek1monday(year) - today = _ymd2ord(self._year, self._month, self._day) - # Internally, week and day have origin 0 - week, day = divmod(today - week1monday, 7) - if week < 0: - year -= 1 - week1monday = _isoweek1monday(year) - week, day = divmod(today - week1monday, 7) - elif week >= 52: - if today >= _isoweek1monday(year+1): - year += 1 - week = 0 - return _IsoCalendarDate(year, week+1, day+1) - - # Pickle support. 
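-    # Illustrative note (added; not in the original source): the state built by
-    # _getstate() below is 4 bytes -- year-high, year-low, month, day -- so
-    # date(2023, 4, 5)._getstate() == (bytes([7, 231, 4, 5]),) because
-    # 2023 == 7*256 + 231.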
-
-    def _getstate(self):
-        yhi, ylo = divmod(self._year, 256)
-        return bytes([yhi, ylo, self._month, self._day]),
-
-    def __setstate(self, string):
-        yhi, ylo, self._month, self._day = string
-        self._year = yhi * 256 + ylo
-
-    def __reduce__(self):
-        return (self.__class__, self._getstate())
-
-_date_class = date  # so functions w/ args named "date" can get at the class
-
-date.min = date(1, 1, 1)
-date.max = date(9999, 12, 31)
-date.resolution = timedelta(days=1)
-
-
-class tzinfo:
-    """Abstract base class for time zone info classes.
-
-    Subclasses must override the tzname(), utcoffset() and dst() methods.
-    """
-    __slots__ = ()
-
-    def tzname(self, dt):
-        "datetime -> string name of time zone."
-        raise NotImplementedError("tzinfo subclass must override tzname()")
-
-    def utcoffset(self, dt):
-        "datetime -> timedelta, positive for east of UTC, negative for west of UTC"
-        raise NotImplementedError("tzinfo subclass must override utcoffset()")
-
-    def dst(self, dt):
-        """datetime -> DST offset as timedelta, positive for east of UTC.
-
-        Return 0 if DST not in effect.  utcoffset() must include the DST
-        offset.
-        """
-        raise NotImplementedError("tzinfo subclass must override dst()")
-
-    def fromutc(self, dt):
-        "datetime in UTC -> datetime in local time."
-
-        if not isinstance(dt, datetime):
-            raise TypeError("fromutc() requires a datetime argument")
-        if dt.tzinfo is not self:
-            raise ValueError("dt.tzinfo is not self")
-
-        dtoff = dt.utcoffset()
-        if dtoff is None:
-            raise ValueError("fromutc() requires a non-None utcoffset() "
-                             "result")
-
-        # See the long comment block at the end of this file for an
-        # explanation of this algorithm.
-        dtdst = dt.dst()
-        if dtdst is None:
-            raise ValueError("fromutc() requires a non-None dst() result")
-        delta = dtoff - dtdst
-        if delta:
-            dt += delta
-            dtdst = dt.dst()
-            if dtdst is None:
-                raise ValueError("fromutc(): dt.dst gave inconsistent "
-                                 "results; cannot convert")
-        return dt + dtdst
-
-    # Pickle support.
-
-    def __reduce__(self):
-        getinitargs = getattr(self, "__getinitargs__", None)
-        if getinitargs:
-            args = getinitargs()
-        else:
-            args = ()
-        return (self.__class__, args, self.__getstate__())
-
-
-class IsoCalendarDate(tuple):
-
-    def __new__(cls, year, week, weekday, /):
-        return super().__new__(cls, (year, week, weekday))
-
-    @property
-    def year(self):
-        return self[0]
-
-    @property
-    def week(self):
-        return self[1]
-
-    @property
-    def weekday(self):
-        return self[2]
-
-    def __reduce__(self):
-        # This code is intended to pickle the object without making the
-        # class public. See https://bugs.python.org/msg352381
-        return (tuple, (tuple(self),))
-
-    def __repr__(self):
-        return (f'{self.__class__.__name__}'
-                f'(year={self[0]}, week={self[1]}, weekday={self[2]})')
-
-
-_IsoCalendarDate = IsoCalendarDate
-del IsoCalendarDate
-_tzinfo_class = tzinfo
-
-class time:
-    """Time with time zone.
-
-    Constructors:
-
-    __new__()
-
-    Operators:
-
-    __repr__, __str__
-    __eq__, __le__, __lt__, __ge__, __gt__, __hash__
-
-    Methods:
-
-    strftime()
-    isoformat()
-    utcoffset()
-    tzname()
-    dst()
-
-    Properties (readonly):
-    hour, minute, second, microsecond, tzinfo, fold
-    """
-    __slots__ = '_hour', '_minute', '_second', '_microsecond', '_tzinfo', '_hashcode', '_fold'
-
-    def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *, fold=0):
-        """Constructor.
- - Arguments: - - hour, minute (required) - second, microsecond (default to zero) - tzinfo (default to None) - fold (keyword only, default to zero) - """ - if (isinstance(hour, (bytes, str)) and len(hour) == 6 and - ord(hour[0:1])&0x7F < 24): - # Pickle support - if isinstance(hour, str): - try: - hour = hour.encode('latin1') - except UnicodeEncodeError: - # More informative error message. - raise ValueError( - "Failed to encode latin1 string when unpickling " - "a time object. " - "pickle.load(data, encoding='latin1') is assumed.") - self = object.__new__(cls) - self.__setstate(hour, minute or None) - self._hashcode = -1 - return self - hour, minute, second, microsecond, fold = _check_time_fields( - hour, minute, second, microsecond, fold) - _check_tzinfo_arg(tzinfo) - self = object.__new__(cls) - self._hour = hour - self._minute = minute - self._second = second - self._microsecond = microsecond - self._tzinfo = tzinfo - self._hashcode = -1 - self._fold = fold - return self - - # Read-only field accessors - @property - def hour(self): - """hour (0-23)""" - return self._hour - - @property - def minute(self): - """minute (0-59)""" - return self._minute - - @property - def second(self): - """second (0-59)""" - return self._second - - @property - def microsecond(self): - """microsecond (0-999999)""" - return self._microsecond - - @property - def tzinfo(self): - """timezone info object""" - return self._tzinfo - - @property - def fold(self): - return self._fold - - # Standard conversions, __hash__ (and helpers) - - # Comparisons of time objects with other. - - def __eq__(self, other): - if isinstance(other, time): - return self._cmp(other, allow_mixed=True) == 0 - else: - return NotImplemented - - def __le__(self, other): - if isinstance(other, time): - return self._cmp(other) <= 0 - else: - return NotImplemented - - def __lt__(self, other): - if isinstance(other, time): - return self._cmp(other) < 0 - else: - return NotImplemented - - def __ge__(self, other): - if isinstance(other, time): - return self._cmp(other) >= 0 - else: - return NotImplemented - - def __gt__(self, other): - if isinstance(other, time): - return self._cmp(other) > 0 - else: - return NotImplemented - - def _cmp(self, other, allow_mixed=False): - assert isinstance(other, time) - mytz = self._tzinfo - ottz = other._tzinfo - myoff = otoff = None - - if mytz is ottz: - base_compare = True - else: - myoff = self.utcoffset() - otoff = other.utcoffset() - base_compare = myoff == otoff - - if base_compare: - return _cmp((self._hour, self._minute, self._second, - self._microsecond), - (other._hour, other._minute, other._second, - other._microsecond)) - if myoff is None or otoff is None: - if allow_mixed: - return 2 # arbitrary non-zero value - else: - raise TypeError("cannot compare naive and aware times") - myhhmm = self._hour * 60 + self._minute - myoff//timedelta(minutes=1) - othhmm = other._hour * 60 + other._minute - otoff//timedelta(minutes=1) - return _cmp((myhhmm, self._second, self._microsecond), - (othhmm, other._second, other._microsecond)) - - def __hash__(self): - """Hash.""" - if self._hashcode == -1: - if self.fold: - t = self.replace(fold=0) - else: - t = self - tzoff = t.utcoffset() - if not tzoff: # zero or None - self._hashcode = hash(t._getstate()[0]) - else: - h, m = divmod(timedelta(hours=self.hour, minutes=self.minute) - tzoff, - timedelta(hours=1)) - assert not m % timedelta(minutes=1), "whole minute" - m //= timedelta(minutes=1) - if 0 <= h < 24: - self._hashcode = hash(time(h, m, self.second, 
self.microsecond)) - else: - self._hashcode = hash((h, m, self.second, self.microsecond)) - return self._hashcode - - # Conversion to string - - def _tzstr(self): - """Return formatted timezone offset (+xx:xx) or an empty string.""" - off = self.utcoffset() - return _format_offset(off) - - def __repr__(self): - """Convert to formal string, for repr().""" - if self._microsecond != 0: - s = ", %d, %d" % (self._second, self._microsecond) - elif self._second != 0: - s = ", %d" % self._second - else: - s = "" - s= "%s.%s(%d, %d%s)" % (self.__class__.__module__, - self.__class__.__qualname__, - self._hour, self._minute, s) - if self._tzinfo is not None: - assert s[-1:] == ")" - s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")" - if self._fold: - assert s[-1:] == ")" - s = s[:-1] + ", fold=1)" - return s - - def isoformat(self, timespec='auto'): - """Return the time formatted according to ISO. - - The full format is 'HH:MM:SS.mmmmmm+zz:zz'. By default, the fractional - part is omitted if self.microsecond == 0. - - The optional argument timespec specifies the number of additional - terms of the time to include. Valid options are 'auto', 'hours', - 'minutes', 'seconds', 'milliseconds' and 'microseconds'. - """ - s = _format_time(self._hour, self._minute, self._second, - self._microsecond, timespec) - tz = self._tzstr() - if tz: - s += tz - return s - - __str__ = isoformat - - @classmethod - def fromisoformat(cls, time_string): - """Construct a time from a string in one of the ISO 8601 formats.""" - if not isinstance(time_string, str): - raise TypeError('fromisoformat: argument must be str') - - # The spec actually requires that time-only ISO 8601 strings start with - # T, but the extended format allows this to be omitted as long as there - # is no ambiguity with date strings. - time_string = time_string.removeprefix('T') - - try: - return cls(*_parse_isoformat_time(time_string)) - except Exception: - raise ValueError(f'Invalid isoformat string: {time_string!r}') - - def strftime(self, format): - """Format using strftime(). The date part of the timestamp passed - to underlying strftime should not be used. - """ - # The year must be >= 1000 else Python's strftime implementation - # can raise a bogus exception. - timetuple = (1900, 1, 1, - self._hour, self._minute, self._second, - 0, 1, -1) - return _wrap_strftime(self, format, timetuple) - - def __format__(self, fmt): - if not isinstance(fmt, str): - raise TypeError("must be str, not %s" % type(fmt).__name__) - if len(fmt) != 0: - return self.strftime(fmt) - return str(self) - - # Timezone functions - - def utcoffset(self): - """Return the timezone offset as timedelta, positive east of UTC - (negative west of UTC).""" - if self._tzinfo is None: - return None - offset = self._tzinfo.utcoffset(None) - _check_utc_offset("utcoffset", offset) - return offset - - def tzname(self): - """Return the timezone name. - - Note that the name is 100% informational -- there's no requirement that - it mean anything in particular. For example, "GMT", "UTC", "-500", - "-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies. - """ - if self._tzinfo is None: - return None - name = self._tzinfo.tzname(None) - _check_tzname(name) - return name - - def dst(self): - """Return 0 if DST is not in effect, or the DST offset (as timedelta - positive eastward) if DST is in effect. 
- - This is purely informational; the DST offset has already been added to - the UTC offset returned by utcoffset() if applicable, so there's no - need to consult dst() unless you're interested in displaying the DST - info. - """ - if self._tzinfo is None: - return None - offset = self._tzinfo.dst(None) - _check_utc_offset("dst", offset) - return offset - - def replace(self, hour=None, minute=None, second=None, microsecond=None, - tzinfo=True, *, fold=None): - """Return a new time with new values for the specified fields.""" - if hour is None: - hour = self.hour - if minute is None: - minute = self.minute - if second is None: - second = self.second - if microsecond is None: - microsecond = self.microsecond - if tzinfo is True: - tzinfo = self.tzinfo - if fold is None: - fold = self._fold - return type(self)(hour, minute, second, microsecond, tzinfo, fold=fold) - - # Pickle support. - - def _getstate(self, protocol=3): - us2, us3 = divmod(self._microsecond, 256) - us1, us2 = divmod(us2, 256) - h = self._hour - if self._fold and protocol > 3: - h += 128 - basestate = bytes([h, self._minute, self._second, - us1, us2, us3]) - if self._tzinfo is None: - return (basestate,) - else: - return (basestate, self._tzinfo) - - def __setstate(self, string, tzinfo): - if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class): - raise TypeError("bad tzinfo state arg") - h, self._minute, self._second, us1, us2, us3 = string - if h > 127: - self._fold = 1 - self._hour = h - 128 - else: - self._fold = 0 - self._hour = h - self._microsecond = (((us1 << 8) | us2) << 8) | us3 - self._tzinfo = tzinfo - - def __reduce_ex__(self, protocol): - return (self.__class__, self._getstate(protocol)) - - def __reduce__(self): - return self.__reduce_ex__(2) - -_time_class = time # so functions w/ args named "time" can get at the class - -time.min = time(0, 0, 0) -time.max = time(23, 59, 59, 999999) -time.resolution = timedelta(microseconds=1) - - -class datetime(date): - """datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]]) - - The year, month and day arguments are required. tzinfo may be None, or an - instance of a tzinfo subclass. The remaining arguments may be ints. - """ - __slots__ = date.__slots__ + time.__slots__ - - def __new__(cls, year, month=None, day=None, hour=0, minute=0, second=0, - microsecond=0, tzinfo=None, *, fold=0): - if (isinstance(year, (bytes, str)) and len(year) == 10 and - 1 <= ord(year[2:3])&0x7F <= 12): - # Pickle support - if isinstance(year, str): - try: - year = bytes(year, 'latin1') - except UnicodeEncodeError: - # More informative error message. - raise ValueError( - "Failed to encode latin1 string when unpickling " - "a datetime object. 
" - "pickle.load(data, encoding='latin1') is assumed.") - self = object.__new__(cls) - self.__setstate(year, month) - self._hashcode = -1 - return self - year, month, day = _check_date_fields(year, month, day) - hour, minute, second, microsecond, fold = _check_time_fields( - hour, minute, second, microsecond, fold) - _check_tzinfo_arg(tzinfo) - self = object.__new__(cls) - self._year = year - self._month = month - self._day = day - self._hour = hour - self._minute = minute - self._second = second - self._microsecond = microsecond - self._tzinfo = tzinfo - self._hashcode = -1 - self._fold = fold - return self - - # Read-only field accessors - @property - def hour(self): - """hour (0-23)""" - return self._hour - - @property - def minute(self): - """minute (0-59)""" - return self._minute - - @property - def second(self): - """second (0-59)""" - return self._second - - @property - def microsecond(self): - """microsecond (0-999999)""" - return self._microsecond - - @property - def tzinfo(self): - """timezone info object""" - return self._tzinfo - - @property - def fold(self): - return self._fold - - @classmethod - def _fromtimestamp(cls, t, utc, tz): - """Construct a datetime from a POSIX timestamp (like time.time()). - - A timezone info object may be passed in as well. - """ - frac, t = _math.modf(t) - us = round(frac * 1e6) - if us >= 1000000: - t += 1 - us -= 1000000 - elif us < 0: - t -= 1 - us += 1000000 - - converter = _time.gmtime if utc else _time.localtime - y, m, d, hh, mm, ss, weekday, jday, dst = converter(t) - ss = min(ss, 59) # clamp out leap seconds if the platform has them - result = cls(y, m, d, hh, mm, ss, us, tz) - if tz is None and not utc: - # As of version 2015f max fold in IANA database is - # 23 hours at 1969-09-30 13:00:00 in Kwajalein. - # Let's probe 24 hours in the past to detect a transition: - max_fold_seconds = 24 * 3600 - - # On Windows localtime_s throws an OSError for negative values, - # thus we can't perform fold detection for values of time less - # than the max time fold. See comments in _datetimemodule's - # version of this method for more details. - if t < max_fold_seconds and sys.platform.startswith("win"): - return result - - y, m, d, hh, mm, ss = converter(t - max_fold_seconds)[:6] - probe1 = cls(y, m, d, hh, mm, ss, us, tz) - trans = result - probe1 - timedelta(0, max_fold_seconds) - if trans.days < 0: - y, m, d, hh, mm, ss = converter(t + trans // timedelta(0, 1))[:6] - probe2 = cls(y, m, d, hh, mm, ss, us, tz) - if probe2 == result: - result._fold = 1 - elif tz is not None: - result = tz.fromutc(result) - return result - - @classmethod - def fromtimestamp(cls, timestamp, tz=None): - """Construct a datetime from a POSIX timestamp (like time.time()). - - A timezone info object may be passed in as well. - """ - _check_tzinfo_arg(tz) - - return cls._fromtimestamp(timestamp, tz is not None, tz) - - @classmethod - def utcfromtimestamp(cls, t): - """Construct a naive UTC datetime from a POSIX timestamp.""" - import warnings - warnings.warn("datetime.utcfromtimestamp() is deprecated and scheduled " - "for removal in a future version. Use timezone-aware " - "objects to represent datetimes in UTC: " - "datetime.fromtimestamp(t, datetime.UTC).", - DeprecationWarning, - stacklevel=2) - return cls._fromtimestamp(t, True, None) - - @classmethod - def now(cls, tz=None): - "Construct a datetime from time.time() and optional time zone info." 
- t = _time.time() - return cls.fromtimestamp(t, tz) - - @classmethod - def utcnow(cls): - "Construct a UTC datetime from time.time()." - import warnings - warnings.warn("datetime.utcnow() is deprecated and scheduled for " - "removal in a future version. Instead, Use timezone-aware " - "objects to represent datetimes in UTC: " - "datetime.now(datetime.UTC).", - DeprecationWarning, - stacklevel=2) - t = _time.time() - return cls._fromtimestamp(t, True, None) - - @classmethod - def combine(cls, date, time, tzinfo=True): - "Construct a datetime from a given date and a given time." - if not isinstance(date, _date_class): - raise TypeError("date argument must be a date instance") - if not isinstance(time, _time_class): - raise TypeError("time argument must be a time instance") - if tzinfo is True: - tzinfo = time.tzinfo - return cls(date.year, date.month, date.day, - time.hour, time.minute, time.second, time.microsecond, - tzinfo, fold=time.fold) - - @classmethod - def fromisoformat(cls, date_string): - """Construct a datetime from a string in one of the ISO 8601 formats.""" - if not isinstance(date_string, str): - raise TypeError('fromisoformat: argument must be str') - - if len(date_string) < 7: - raise ValueError(f'Invalid isoformat string: {date_string!r}') - - # Split this at the separator - try: - separator_location = _find_isoformat_datetime_separator(date_string) - dstr = date_string[0:separator_location] - tstr = date_string[(separator_location+1):] - - date_components = _parse_isoformat_date(dstr) - except ValueError: - raise ValueError( - f'Invalid isoformat string: {date_string!r}') from None - - if tstr: - try: - time_components = _parse_isoformat_time(tstr) - except ValueError: - raise ValueError( - f'Invalid isoformat string: {date_string!r}') from None - else: - time_components = [0, 0, 0, 0, None] - - return cls(*(date_components + time_components)) - - def timetuple(self): - "Return local time tuple compatible with time.localtime()." - dst = self.dst() - if dst is None: - dst = -1 - elif dst: - dst = 1 - else: - dst = 0 - return _build_struct_time(self.year, self.month, self.day, - self.hour, self.minute, self.second, - dst) - - def _mktime(self): - """Return integer POSIX timestamp.""" - epoch = datetime(1970, 1, 1) - max_fold_seconds = 24 * 3600 - t = (self - epoch) // timedelta(0, 1) - def local(u): - y, m, d, hh, mm, ss = _time.localtime(u)[:6] - return (datetime(y, m, d, hh, mm, ss) - epoch) // timedelta(0, 1) - - # Our goal is to solve t = local(u) for u. - a = local(t) - t - u1 = t - a - t1 = local(u1) - if t1 == t: - # We found one solution, but it may not be the one we need. - # Look for an earlier solution (if `fold` is 0), or a - # later one (if `fold` is 1). - u2 = u1 + (-max_fold_seconds, max_fold_seconds)[self.fold] - b = local(u2) - u2 - if a == b: - return u1 - else: - b = t1 - u1 - assert a != b - u2 = t - b - t2 = local(u2) - if t2 == t: - return u2 - if t1 == t: - return u1 - # We have found both offsets a and b, but neither t - a nor t - b is - # a solution. This means t is in the gap. - return (max, min)[self.fold](u1, u2) - - - def timestamp(self): - "Return POSIX timestamp as float" - if self._tzinfo is None: - s = self._mktime() - return s + self.microsecond / 1e6 - else: - return (self - _EPOCH).total_seconds() - - def utctimetuple(self): - "Return UTC time tuple compatible with time.gmtime()." 
-        offset = self.utcoffset()
-        if offset:
-            self -= offset
-        y, m, d = self.year, self.month, self.day
-        hh, mm, ss = self.hour, self.minute, self.second
-        return _build_struct_time(y, m, d, hh, mm, ss, 0)
-
-    def date(self):
-        "Return the date part."
-        return date(self._year, self._month, self._day)
-
-    def time(self):
-        "Return the time part, with tzinfo None."
-        return time(self.hour, self.minute, self.second, self.microsecond, fold=self.fold)
-
-    def timetz(self):
-        "Return the time part, with same tzinfo."
-        return time(self.hour, self.minute, self.second, self.microsecond,
-                    self._tzinfo, fold=self.fold)
-
-    def replace(self, year=None, month=None, day=None, hour=None,
-                minute=None, second=None, microsecond=None, tzinfo=True,
-                *, fold=None):
-        """Return a new datetime with new values for the specified fields."""
-        if year is None:
-            year = self.year
-        if month is None:
-            month = self.month
-        if day is None:
-            day = self.day
-        if hour is None:
-            hour = self.hour
-        if minute is None:
-            minute = self.minute
-        if second is None:
-            second = self.second
-        if microsecond is None:
-            microsecond = self.microsecond
-        if tzinfo is True:
-            tzinfo = self.tzinfo
-        if fold is None:
-            fold = self.fold
-        return type(self)(year, month, day, hour, minute, second,
-                          microsecond, tzinfo, fold=fold)
-
-    def _local_timezone(self):
-        if self.tzinfo is None:
-            ts = self._mktime()
-            # Detect gap
-            ts2 = self.replace(fold=1-self.fold)._mktime()
-            if ts2 != ts: # This happens in a gap or a fold
-                if (ts2 > ts) == self.fold:
-                    ts = ts2
-        else:
-            ts = (self - _EPOCH) // timedelta(seconds=1)
-        localtm = _time.localtime(ts)
-        local = datetime(*localtm[:6])
-        # Extract TZ data
-        gmtoff = localtm.tm_gmtoff
-        zone = localtm.tm_zone
-        return timezone(timedelta(seconds=gmtoff), zone)
-
-    def astimezone(self, tz=None):
-        if tz is None:
-            tz = self._local_timezone()
-        elif not isinstance(tz, tzinfo):
-            raise TypeError("tz argument must be an instance of tzinfo")
-
-        mytz = self.tzinfo
-        if mytz is None:
-            mytz = self._local_timezone()
-            myoffset = mytz.utcoffset(self)
-        else:
-            myoffset = mytz.utcoffset(self)
-            if myoffset is None:
-                mytz = self.replace(tzinfo=None)._local_timezone()
-                myoffset = mytz.utcoffset(self)
-
-        if tz is mytz:
-            return self
-
-        # Convert self to UTC, and attach the new time zone object.
-        utc = (self - myoffset).replace(tzinfo=tz)
-
-        # Convert from UTC to tz's local time.
-        return tz.fromutc(utc)
-
-    # Ways to produce a string.
-
-    def ctime(self):
-        "Return ctime() style string."
-        weekday = self.toordinal() % 7 or 7
-        return "%s %s %2d %02d:%02d:%02d %04d" % (
-            _DAYNAMES[weekday],
-            _MONTHNAMES[self._month],
-            self._day,
-            self._hour, self._minute, self._second,
-            self._year)
-
-    def isoformat(self, sep='T', timespec='auto'):
-        """Return the time formatted according to ISO.
-
-        The full format looks like 'YYYY-MM-DD HH:MM:SS.mmmmmm'.
-        By default, the fractional part is omitted if self.microsecond == 0.
-
-        If self.tzinfo is not None, the UTC offset is also attached, giving
-        a full format of 'YYYY-MM-DD HH:MM:SS.mmmmmm+HH:MM'.
-
-        Optional argument sep specifies the separator between date and
-        time, default 'T'.
-
-        The optional argument timespec specifies the number of additional
-        terms of the time to include. Valid options are 'auto', 'hours',
-        'minutes', 'seconds', 'milliseconds' and 'microseconds'.
- """ - s = ("%04d-%02d-%02d%c" % (self._year, self._month, self._day, sep) + - _format_time(self._hour, self._minute, self._second, - self._microsecond, timespec)) - - off = self.utcoffset() - tz = _format_offset(off) - if tz: - s += tz - - return s - - def __repr__(self): - """Convert to formal string, for repr().""" - L = [self._year, self._month, self._day, # These are never zero - self._hour, self._minute, self._second, self._microsecond] - if L[-1] == 0: - del L[-1] - if L[-1] == 0: - del L[-1] - s = "%s.%s(%s)" % (self.__class__.__module__, - self.__class__.__qualname__, - ", ".join(map(str, L))) - if self._tzinfo is not None: - assert s[-1:] == ")" - s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")" - if self._fold: - assert s[-1:] == ")" - s = s[:-1] + ", fold=1)" - return s - - def __str__(self): - "Convert to string, for str()." - return self.isoformat(sep=' ') - - @classmethod - def strptime(cls, date_string, format): - 'string, format -> new datetime parsed from a string (like time.strptime()).' - import _strptime - return _strptime._strptime_datetime(cls, date_string, format) - - def utcoffset(self): - """Return the timezone offset as timedelta positive east of UTC (negative west of - UTC).""" - if self._tzinfo is None: - return None - offset = self._tzinfo.utcoffset(self) - _check_utc_offset("utcoffset", offset) - return offset - - def tzname(self): - """Return the timezone name. - - Note that the name is 100% informational -- there's no requirement that - it mean anything in particular. For example, "GMT", "UTC", "-500", - "-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies. - """ - if self._tzinfo is None: - return None - name = self._tzinfo.tzname(self) - _check_tzname(name) - return name - - def dst(self): - """Return 0 if DST is not in effect, or the DST offset (as timedelta - positive eastward) if DST is in effect. - - This is purely informational; the DST offset has already been added to - the UTC offset returned by utcoffset() if applicable, so there's no - need to consult dst() unless you're interested in displaying the DST - info. - """ - if self._tzinfo is None: - return None - offset = self._tzinfo.dst(self) - _check_utc_offset("dst", offset) - return offset - - # Comparisons of datetime objects with other. 
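-    # Illustrative summary (added; not part of the original comments): with the
-    # methods below, comparing a naive and an aware datetime with == or !=
-    # simply reports them unequal (via the allow_mixed path in _cmp), while
-    # ordering them with <, <=, > or >= raises TypeError; comparing a datetime
-    # with a plain date for equality returns False.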
- - def __eq__(self, other): - if isinstance(other, datetime): - return self._cmp(other, allow_mixed=True) == 0 - elif not isinstance(other, date): - return NotImplemented - else: - return False - - def __le__(self, other): - if isinstance(other, datetime): - return self._cmp(other) <= 0 - elif not isinstance(other, date): - return NotImplemented - else: - _cmperror(self, other) - - def __lt__(self, other): - if isinstance(other, datetime): - return self._cmp(other) < 0 - elif not isinstance(other, date): - return NotImplemented - else: - _cmperror(self, other) - - def __ge__(self, other): - if isinstance(other, datetime): - return self._cmp(other) >= 0 - elif not isinstance(other, date): - return NotImplemented - else: - _cmperror(self, other) - - def __gt__(self, other): - if isinstance(other, datetime): - return self._cmp(other) > 0 - elif not isinstance(other, date): - return NotImplemented - else: - _cmperror(self, other) - - def _cmp(self, other, allow_mixed=False): - assert isinstance(other, datetime) - mytz = self._tzinfo - ottz = other._tzinfo - myoff = otoff = None - - if mytz is ottz: - base_compare = True - else: - myoff = self.utcoffset() - otoff = other.utcoffset() - # Assume that allow_mixed means that we are called from __eq__ - if allow_mixed: - if myoff != self.replace(fold=not self.fold).utcoffset(): - return 2 - if otoff != other.replace(fold=not other.fold).utcoffset(): - return 2 - base_compare = myoff == otoff - - if base_compare: - return _cmp((self._year, self._month, self._day, - self._hour, self._minute, self._second, - self._microsecond), - (other._year, other._month, other._day, - other._hour, other._minute, other._second, - other._microsecond)) - if myoff is None or otoff is None: - if allow_mixed: - return 2 # arbitrary non-zero value - else: - raise TypeError("cannot compare naive and aware datetimes") - # XXX What follows could be done more efficiently... - diff = self - other # this will take offsets into account - if diff.days < 0: - return -1 - return diff and 1 or 0 - - def __add__(self, other): - "Add a datetime and a timedelta." - if not isinstance(other, timedelta): - return NotImplemented - delta = timedelta(self.toordinal(), - hours=self._hour, - minutes=self._minute, - seconds=self._second, - microseconds=self._microsecond) - delta += other - hour, rem = divmod(delta.seconds, 3600) - minute, second = divmod(rem, 60) - if 0 < delta.days <= _MAXORDINAL: - return type(self).combine(date.fromordinal(delta.days), - time(hour, minute, second, - delta.microseconds, - tzinfo=self._tzinfo)) - raise OverflowError("result out of range") - - __radd__ = __add__ - - def __sub__(self, other): - "Subtract two datetimes, or a datetime and a timedelta." 
- if not isinstance(other, datetime): - if isinstance(other, timedelta): - return self + -other - return NotImplemented - - days1 = self.toordinal() - days2 = other.toordinal() - secs1 = self._second + self._minute * 60 + self._hour * 3600 - secs2 = other._second + other._minute * 60 + other._hour * 3600 - base = timedelta(days1 - days2, - secs1 - secs2, - self._microsecond - other._microsecond) - if self._tzinfo is other._tzinfo: - return base - myoff = self.utcoffset() - otoff = other.utcoffset() - if myoff == otoff: - return base - if myoff is None or otoff is None: - raise TypeError("cannot mix naive and timezone-aware time") - return base + otoff - myoff - - def __hash__(self): - if self._hashcode == -1: - if self.fold: - t = self.replace(fold=0) - else: - t = self - tzoff = t.utcoffset() - if tzoff is None: - self._hashcode = hash(t._getstate()[0]) - else: - days = _ymd2ord(self.year, self.month, self.day) - seconds = self.hour * 3600 + self.minute * 60 + self.second - self._hashcode = hash(timedelta(days, seconds, self.microsecond) - tzoff) - return self._hashcode - - # Pickle support. - - def _getstate(self, protocol=3): - yhi, ylo = divmod(self._year, 256) - us2, us3 = divmod(self._microsecond, 256) - us1, us2 = divmod(us2, 256) - m = self._month - if self._fold and protocol > 3: - m += 128 - basestate = bytes([yhi, ylo, m, self._day, - self._hour, self._minute, self._second, - us1, us2, us3]) - if self._tzinfo is None: - return (basestate,) - else: - return (basestate, self._tzinfo) - - def __setstate(self, string, tzinfo): - if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class): - raise TypeError("bad tzinfo state arg") - (yhi, ylo, m, self._day, self._hour, - self._minute, self._second, us1, us2, us3) = string - if m > 127: - self._fold = 1 - self._month = m - 128 - else: - self._fold = 0 - self._month = m - self._year = yhi * 256 + ylo - self._microsecond = (((us1 << 8) | us2) << 8) | us3 - self._tzinfo = tzinfo - - def __reduce_ex__(self, protocol): - return (self.__class__, self._getstate(protocol)) - - def __reduce__(self): - return self.__reduce_ex__(2) - - -datetime.min = datetime(1, 1, 1) -datetime.max = datetime(9999, 12, 31, 23, 59, 59, 999999) -datetime.resolution = timedelta(microseconds=1) - - -def _isoweek1monday(year): - # Helper to calculate the day number of the Monday starting week 1 - # XXX This could be done more efficiently - THURSDAY = 3 - firstday = _ymd2ord(year, 1, 1) - firstweekday = (firstday + 6) % 7 # See weekday() above - week1monday = firstday - firstweekday - if firstweekday > THURSDAY: - week1monday += 7 - return week1monday - - -class timezone(tzinfo): - __slots__ = '_offset', '_name' - - # Sentinel value to disallow None - _Omitted = object() - def __new__(cls, offset, name=_Omitted): - if not isinstance(offset, timedelta): - raise TypeError("offset must be a timedelta") - if name is cls._Omitted: - if not offset: - return cls.utc - name = None - elif not isinstance(name, str): - raise TypeError("name must be a string") - if not cls._minoffset <= offset <= cls._maxoffset: - raise ValueError("offset must be a timedelta " - "strictly between -timedelta(hours=24) and " - "timedelta(hours=24).") - return cls._create(offset, name) - - @classmethod - def _create(cls, offset, name=None): - self = tzinfo.__new__(cls) - self._offset = offset - self._name = name - return self - - def __getinitargs__(self): - """pickle support""" - if self._name is None: - return (self._offset,) - return (self._offset, self._name) - - def __eq__(self, other): - 
if isinstance(other, timezone): - return self._offset == other._offset - return NotImplemented - - def __hash__(self): - return hash(self._offset) - - def __repr__(self): - """Convert to formal string, for repr(). - - >>> tz = timezone.utc - >>> repr(tz) - 'datetime.timezone.utc' - >>> tz = timezone(timedelta(hours=-5), 'EST') - >>> repr(tz) - "datetime.timezone(datetime.timedelta(-1, 68400), 'EST')" - """ - if self is self.utc: - return 'datetime.timezone.utc' - if self._name is None: - return "%s.%s(%r)" % (self.__class__.__module__, - self.__class__.__qualname__, - self._offset) - return "%s.%s(%r, %r)" % (self.__class__.__module__, - self.__class__.__qualname__, - self._offset, self._name) - - def __str__(self): - return self.tzname(None) - - def utcoffset(self, dt): - if isinstance(dt, datetime) or dt is None: - return self._offset - raise TypeError("utcoffset() argument must be a datetime instance" - " or None") - - def tzname(self, dt): - if isinstance(dt, datetime) or dt is None: - if self._name is None: - return self._name_from_offset(self._offset) - return self._name - raise TypeError("tzname() argument must be a datetime instance" - " or None") - - def dst(self, dt): - if isinstance(dt, datetime) or dt is None: - return None - raise TypeError("dst() argument must be a datetime instance" - " or None") - - def fromutc(self, dt): - if isinstance(dt, datetime): - if dt.tzinfo is not self: - raise ValueError("fromutc: dt.tzinfo " - "is not self") - return dt + self._offset - raise TypeError("fromutc() argument must be a datetime instance" - " or None") - - _maxoffset = timedelta(hours=24, microseconds=-1) - _minoffset = -_maxoffset - - @staticmethod - def _name_from_offset(delta): - if not delta: - return 'UTC' - if delta < timedelta(0): - sign = '-' - delta = -delta - else: - sign = '+' - hours, rest = divmod(delta, timedelta(hours=1)) - minutes, rest = divmod(rest, timedelta(minutes=1)) - seconds = rest.seconds - microseconds = rest.microseconds - if microseconds: - return (f'UTC{sign}{hours:02d}:{minutes:02d}:{seconds:02d}' - f'.{microseconds:06d}') - if seconds: - return f'UTC{sign}{hours:02d}:{minutes:02d}:{seconds:02d}' - return f'UTC{sign}{hours:02d}:{minutes:02d}' - -UTC = timezone.utc = timezone._create(timedelta(0)) - -# bpo-37642: These attributes are rounded to the nearest minute for backwards -# compatibility, even though the constructor will accept a wider range of -# values. This may change in the future. -timezone.min = timezone._create(-timedelta(hours=23, minutes=59)) -timezone.max = timezone._create(timedelta(hours=23, minutes=59)) -_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc) - -# Some time zone algebra. For a datetime x, let -# x.n = x stripped of its timezone -- its naive time. -# x.o = x.utcoffset(), and assuming that doesn't raise an exception or -# return None -# x.d = x.dst(), and assuming that doesn't raise an exception or -# return None -# x.s = x's standard offset, x.o - x.d -# -# Now some derived rules, where k is a duration (timedelta). -# -# 1. x.o = x.s + x.d -# This follows from the definition of x.s. -# -# 2. If x and y have the same tzinfo member, x.s = y.s. -# This is actually a requirement, an assumption we need to make about -# sane tzinfo classes. -# -# 3. The naive UTC time corresponding to x is x.n - x.o. -# This is again a requirement for a sane tzinfo class. -# -# 4. (x+k).s = x.s -# This follows from #2, and that datetime.timetz+timedelta preserves tzinfo. -# -# 5. (x+k).n = x.n + k -# Again follows from how arithmetic is defined. 
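As a minimal sketch of the x.n / x.o / x.d / x.s notation above, assuming a toy
zone frozen in DST (ToyEastern and its fixed values are hypothetical, purely
illustrative):

from datetime import datetime, timedelta, timezone, tzinfo

class ToyEastern(tzinfo):
    # Toy zone pinned to US-Eastern-style summer values.
    def utcoffset(self, dt): return timedelta(hours=-4)   # x.o
    def dst(self, dt):       return timedelta(hours=1)    # x.d
    def tzname(self, dt):    return "EDT"

x = datetime(2023, 7, 1, 12, 0, tzinfo=ToyEastern())
n = x.replace(tzinfo=None)   # x.n, the naive wall-clock time
o = x.utcoffset()            # x.o == -4:00
d = x.dst()                  # x.d == 1:00
s = o - d                    # x.s == -5:00, the standard offset
assert o == s + d                                                 # rule 1
assert n - o == x.astimezone(timezone.utc).replace(tzinfo=None)   # rule 3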
-# -# Now we can explain tz.fromutc(x). Let's assume it's an interesting case -# (meaning that the various tzinfo methods exist, and don't blow up or return -# None when called). -# -# The function wants to return a datetime y with timezone tz, equivalent to x. -# x is already in UTC. -# -# By #3, we want -# -# y.n - y.o = x.n [1] -# -# The algorithm starts by attaching tz to x.n, and calling that y. So -# x.n = y.n at the start. Then it wants to add a duration k to y, so that [1] -# becomes true; in effect, we want to solve [2] for k: -# -# (y+k).n - (y+k).o = x.n [2] -# -# By #1, this is the same as -# -# (y+k).n - ((y+k).s + (y+k).d) = x.n [3] -# -# By #5, (y+k).n = y.n + k, which equals x.n + k because x.n=y.n at the start. -# Substituting that into [3], -# -# x.n + k - (y+k).s - (y+k).d = x.n; the x.n terms cancel, leaving -# k - (y+k).s - (y+k).d = 0; rearranging, -# k = (y+k).s - (y+k).d; by #4, (y+k).s == y.s, so -# k = y.s - (y+k).d -# -# On the RHS, (y+k).d can't be computed directly, but y.s can be, and we -# approximate k by ignoring the (y+k).d term at first. Note that k can't be -# very large, since all offset-returning methods return a duration of magnitude -# less than 24 hours. For that reason, if y is firmly in std time, (y+k).d must -# be 0, so ignoring it has no consequence then. -# -# In any case, the new value is -# -# z = y + y.s [4] -# -# It's helpful to step back at look at [4] from a higher level: it's simply -# mapping from UTC to tz's standard time. -# -# At this point, if -# -# z.n - z.o = x.n [5] -# -# we have an equivalent time, and are almost done. The insecurity here is -# at the start of daylight time. Picture US Eastern for concreteness. The wall -# time jumps from 1:59 to 3:00, and wall hours of the form 2:MM don't make good -# sense then. The docs ask that an Eastern tzinfo class consider such a time to -# be EDT (because it's "after 2"), which is a redundant spelling of 1:MM EST -# on the day DST starts. We want to return the 1:MM EST spelling because that's -# the only spelling that makes sense on the local wall clock. -# -# In fact, if [5] holds at this point, we do have the standard-time spelling, -# but that takes a bit of proof. We first prove a stronger result. What's the -# difference between the LHS and RHS of [5]? Let -# -# diff = x.n - (z.n - z.o) [6] -# -# Now -# z.n = by [4] -# (y + y.s).n = by #5 -# y.n + y.s = since y.n = x.n -# x.n + y.s = since z and y are have the same tzinfo member, -# y.s = z.s by #2 -# x.n + z.s -# -# Plugging that back into [6] gives -# -# diff = -# x.n - ((x.n + z.s) - z.o) = expanding -# x.n - x.n - z.s + z.o = cancelling -# - z.s + z.o = by #2 -# z.d -# -# So diff = z.d. -# -# If [5] is true now, diff = 0, so z.d = 0 too, and we have the standard-time -# spelling we wanted in the endcase described above. We're done. Contrarily, -# if z.d = 0, then we have a UTC equivalent, and are also done. -# -# If [5] is not true now, diff = z.d != 0, and z.d is the offset we need to -# add to z (in effect, z is in tz's standard time, and we need to shift the -# local clock into tz's daylight time). -# -# Let -# -# z' = z + z.d = z + diff [7] -# -# and we can again ask whether -# -# z'.n - z'.o = x.n [8] -# -# If so, we're done. If not, the tzinfo class is insane, according to the -# assumptions we've made. This also requires a bit of proof. 
As before, let's -# compute the difference between the LHS and RHS of [8] (and skipping some of -# the justifications for the kinds of substitutions we've done several times -# already): -# -# diff' = x.n - (z'.n - z'.o) = replacing z'.n via [7] -# x.n - (z.n + diff - z'.o) = replacing diff via [6] -# x.n - (z.n + x.n - (z.n - z.o) - z'.o) = -# x.n - z.n - x.n + z.n - z.o + z'.o = cancel x.n -# - z.n + z.n - z.o + z'.o = cancel z.n -# - z.o + z'.o = #1 twice -# -z.s - z.d + z'.s + z'.d = z and z' have same tzinfo -# z'.d - z.d -# -# So z' is UTC-equivalent to x iff z'.d = z.d at this point. If they are equal, -# we've found the UTC-equivalent so are done. In fact, we stop with [7] and -# return z', not bothering to compute z'.d. -# -# How could z.d and z'd differ? z' = z + z.d [7], so merely moving z' by -# a dst() offset, and starting *from* a time already in DST (we know z.d != 0), -# would have to change the result dst() returns: we start in DST, and moving -# a little further into it takes us out of DST. -# -# There isn't a sane case where this can happen. The closest it gets is at -# the end of DST, where there's an hour in UTC with no spelling in a hybrid -# tzinfo class. In US Eastern, that's 5:MM UTC = 0:MM EST = 1:MM EDT. During -# that hour, on an Eastern clock 1:MM is taken as being in standard time (6:MM -# UTC) because the docs insist on that, but 0:MM is taken as being in daylight -# time (4:MM UTC). There is no local time mapping to 5:MM UTC. The local -# clock jumps from 1:59 back to 1:00 again, and repeats the 1:MM hour in -# standard time. Since that's what the local clock *does*, we want to map both -# UTC hours 5:MM and 6:MM to 1:MM Eastern. The result is ambiguous -# in local time, but so it goes -- it's the way the local clock works. -# -# When x = 5:MM UTC is the input to this algorithm, x.o=0, y.o=-5 and y.d=0, -# so z=0:MM. z.d=60 (minutes) then, so [5] doesn't hold and we keep going. -# z' = z + z.d = 1:MM then, and z'.d=0, and z'.d - z.d = -60 != 0 so [8] -# (correctly) concludes that z' is not UTC-equivalent to x. -# -# Because we know z.d said z was in daylight time (else [5] would have held and -# we would have stopped then), and we know z.d != z'.d (else [8] would have held -# and we have stopped then), and there are only 2 possible values dst() can -# return in Eastern, it follows that z'.d must be 0 (which it is in the example, -# but the reasoning doesn't depend on the example -- it depends on there being -# two possible dst() outcomes, one zero and the other non-zero). Therefore -# z' must be in standard time, and is the spelling we want in this case. -# -# Note again that z' is not UTC-equivalent as far as the hybrid tzinfo class is -# concerned (because it takes z' as being in standard time rather than the -# daylight time we intend here), but returning it gives the real-life "local -# clock repeats an hour" behavior when mapping the "unspellable" UTC hour into -# tz. -# -# When the input is 6:MM, z=1:MM and z.d=0, and we stop at once, again with -# the 1:MM standard time spelling we want. -# -# So how can this break? One of the assumptions must be violated. Two -# possibilities: -# -# 1) [2] effectively says that y.s is invariant across all y belong to a given -# time zone. This isn't true if, for political reasons or continental drift, -# a region decides to change its base offset from UTC. -# -# 2) There may be versions of "double daylight" time where the tail end of -# the analysis gives up a step too early. 
I haven't thought about that -# enough to say. -# -# In any case, it's clear that the default fromutc() is strong enough to handle -# "almost all" time zones: so long as the standard offset is invariant, it -# doesn't matter if daylight time transition points change from year to year, or -# if daylight time is skipped in some years; it doesn't matter how large or -# small dst() may get within its bounds; and it doesn't even matter if some -# perverse time zone returns a negative dst()). So a breaking case must be -# pretty bizarre, and a tzinfo subclass can override fromutc() if it is. - try: from _datetime import * -except ImportError: - pass -else: - # Clean up unused names - del (_DAYNAMES, _DAYS_BEFORE_MONTH, _DAYS_IN_MONTH, _DI100Y, _DI400Y, - _DI4Y, _EPOCH, _MAXORDINAL, _MONTHNAMES, _build_struct_time, - _check_date_fields, _check_time_fields, - _check_tzinfo_arg, _check_tzname, _check_utc_offset, _cmp, _cmperror, - _date_class, _days_before_month, _days_before_year, _days_in_month, - _format_time, _format_offset, _index, _is_leap, _isoweek1monday, _math, - _ord2ymd, _time, _time_class, _tzinfo_class, _wrap_strftime, _ymd2ord, - _divide_and_round, _parse_isoformat_date, _parse_isoformat_time, - _parse_hh_mm_ss_ff, _IsoCalendarDate, _isoweek_to_gregorian, - _find_isoformat_datetime_separator, _FRACTION_CORRECTION, - _is_ascii_digit) - # XXX Since import * above excludes names that start with _, - # docstring does not get overwritten. In the future, it may be - # appropriate to maintain a single module level docstring and - # remove the following line. from _datetime import __doc__ +except ImportError: + from _pydatetime import * + from _pydatetime import __doc__ + +__all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", + "MINYEAR", "MAXYEAR") diff --git a/Lib/dis.py b/Lib/dis.py index 85c109584bf94f..3a8e6ac3bf5ace 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -11,6 +11,8 @@ _cache_format, _inline_cache_entries, _nb_ops, + _intrinsic_1_descs, + _intrinsic_2_descs, _specializations, _specialized_instructions, ) @@ -42,6 +44,8 @@ SEND = opmap['SEND'] LOAD_ATTR = opmap['LOAD_ATTR'] LOAD_SUPER_ATTR = opmap['LOAD_SUPER_ATTR'] +CALL_INTRINSIC_1 = opmap['CALL_INTRINSIC_1'] +CALL_INTRINSIC_2 = opmap['CALL_INTRINSIC_2'] CACHE = opmap["CACHE"] @@ -506,6 +510,10 @@ def _get_instructions_bytes(code, varname_from_oparg=None, if arg & (1< _MAXLINE: - raise LineTooLong("header line") - if not line: - # for sites which EOF without sending a trailer - break - if line in (b'\r\n', b'\n', b''): - break + self._proxy_response_headers = parse_headers(response.fp) if self.debuglevel > 0: - print('header:', line.decode()) + for hdr, val in self._proxy_response_headers.items(): + print("header:", hdr + ":", val) + + if code != http.HTTPStatus.OK: + self.close() + raise OSError(f"Tunnel connection failed: {code} {message.strip()}") + + finally: + response.close() def connect(self): """Connect to the host and port specified in __init__.""" diff --git a/Lib/http/server.py b/Lib/http/server.py index 971f08046d50b5..a245ffb307860a 100644 --- a/Lib/http/server.py +++ b/Lib/http/server.py @@ -791,7 +791,7 @@ def list_directory(self, path): displaypath = urllib.parse.unquote(self.path, errors='surrogatepass') except UnicodeDecodeError: - displaypath = urllib.parse.unquote(path) + displaypath = urllib.parse.unquote(self.path) displaypath = html.escape(displaypath, quote=False) enc = sys.getfilesystemencoding() title = f'Directory listing for {displaypath}' diff --git 
a/Lib/idlelib/idle_test/test_sidebar.py b/Lib/idlelib/idle_test/test_sidebar.py index 5506fd2b0e22a5..fb52b3a0179553 100644 --- a/Lib/idlelib/idle_test/test_sidebar.py +++ b/Lib/idlelib/idle_test/test_sidebar.py @@ -57,7 +57,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): cls.editwin.per.close() - cls.root.update() + cls.root.update_idletasks() cls.root.destroy() del cls.text, cls.text_frame, cls.editwin, cls.root @@ -695,7 +695,8 @@ def test_mousewheel(self): delta = -1 if sys.platform == 'darwin' else 120 sidebar.canvas.event_generate('', x=0, y=0, delta=delta) yield - self.assertIsNone(text.dlineinfo(text.index(f'{last_lineno}.0'))) + if sys.platform != 'darwin': # .update_idletasks() does not work. + self.assertIsNone(text.dlineinfo(text.index(f'{last_lineno}.0'))) # Scroll back down using the event. sidebar.canvas.event_generate('', x=0, y=0) diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py index 21d9dee652b3df..707c081cb2c5b6 100644 --- a/Lib/importlib/__init__.py +++ b/Lib/importlib/__init__.py @@ -70,40 +70,6 @@ def invalidate_caches(): finder.invalidate_caches() -def find_loader(name, path=None): - """Return the loader for the specified module. - - This is a backward-compatible wrapper around find_spec(). - - This function is deprecated in favor of importlib.util.find_spec(). - - """ - warnings.warn('Deprecated since Python 3.4 and slated for removal in ' - 'Python 3.12; use importlib.util.find_spec() instead', - DeprecationWarning, stacklevel=2) - try: - loader = sys.modules[name].__loader__ - if loader is None: - raise ValueError(f'{name}.__loader__ is None') - else: - return loader - except KeyError: - pass - except AttributeError: - raise ValueError(f'{name}.__loader__ is not set') from None - - spec = _bootstrap._find_spec(name, path) - # We won't worry about malformed specs (missing attributes). - if spec is None: - return None - if spec.loader is None: - if spec.submodule_search_locations is None: - raise ImportError(f'spec for {name} missing loader', name=name) - raise ImportError('namespace packages do not have loaders', - name=name) - return spec.loader - - def import_module(name, package=None): """Import a module. diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py index e4fcaa61e6de29..c48fd506a0e4eb 100644 --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -892,21 +892,6 @@ def find_spec(cls, fullname, path=None, target=None): else: return None - @classmethod - def find_module(cls, fullname, path=None): - """Find the built-in module. - - If 'path' is ever specified then the search is considered a failure. - - This method is deprecated. Use find_spec() instead. - - """ - _warnings.warn("BuiltinImporter.find_module() is deprecated and " - "slated for removal in Python 3.12; use find_spec() instead", - DeprecationWarning) - spec = cls.find_spec(fullname, path) - return spec.loader if spec is not None else None - @staticmethod def create_module(spec): """Create a built-in module""" @@ -1076,18 +1061,6 @@ def find_spec(cls, fullname, path=None, target=None): spec.submodule_search_locations.insert(0, pkgdir) return spec - @classmethod - def find_module(cls, fullname, path=None): - """Find a frozen module. - - This method is deprecated. Use find_spec() instead. 
- - """ - _warnings.warn("FrozenImporter.find_module() is deprecated and " - "slated for removal in Python 3.12; use find_spec() instead", - DeprecationWarning) - return cls if _imp.is_frozen(fullname) else None - @staticmethod def create_module(spec): """Set __file__, if able.""" @@ -1170,16 +1143,6 @@ def _resolve_name(name, package, level): return f'{base}.{name}' if name else base -def _find_spec_legacy(finder, name, path): - msg = (f"{_object_name(finder)}.find_spec() not found; " - "falling back to find_module()") - _warnings.warn(msg, ImportWarning) - loader = finder.find_module(name, path) - if loader is None: - return None - return spec_from_loader(name, loader) - - def _find_spec(name, path, target=None): """Find a module's spec.""" meta_path = sys.meta_path @@ -1200,9 +1163,7 @@ def _find_spec(name, path, target=None): try: find_spec = finder.find_spec except AttributeError: - spec = _find_spec_legacy(finder, name, path) - if spec is None: - continue + continue else: spec = find_spec(name, path, target) if spec is not None: diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index cb227373ca2fd4..d4a1593db2c874 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -442,6 +442,7 @@ def _write_atomic(path, data, mode=0o666): # Python 3.12b1 3526 (Add instrumentation support) # Python 3.12b1 3527 (Add LOAD_SUPER_ATTR) # Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization) +# Python 3.12b1 3529 (Inline list/dict/set comprehensions) # Python 3.13 will start with 3550 @@ -458,7 +459,7 @@ def _write_atomic(path, data, mode=0o666): # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. -MAGIC_NUMBER = (3528).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3529).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c @@ -659,26 +660,6 @@ def _wrap(new, old): return _check_name_wrapper -def _find_module_shim(self, fullname): - """Try to find a loader for the specified module by delegating to - self.find_loader(). - - This method is deprecated in favor of finder.find_spec(). - - """ - _warnings.warn("find_module() is deprecated and " - "slated for removal in Python 3.12; use find_spec() instead", - DeprecationWarning) - # Call find_loader(). If it returns a string (indicating this - # is a namespace package portion), generate a warning and - # return None. - loader, portions = self.find_loader(fullname) - if loader is None and len(portions): - msg = f'Not importing directory {portions[0]}: missing __init__' - _warnings.warn(msg, ImportWarning) - return loader - - def _classify_pyc(data, name, exc_details): """Perform basic validity checking of a pyc header and return the flags field, which determines how the pyc should be further validated against the source. @@ -985,22 +966,6 @@ def find_spec(cls, fullname, path=None, target=None): origin=filepath) return spec - @classmethod - def find_module(cls, fullname, path=None): - """Find module named in the registry. - - This method is deprecated. Use find_spec() instead. 
- - """ - _warnings.warn("WindowsRegistryFinder.find_module() is deprecated and " - "slated for removal in Python 3.12; use find_spec() instead", - DeprecationWarning) - spec = cls.find_spec(fullname, path) - if spec is not None: - return spec.loader - else: - return None - class _LoaderBasics: @@ -1517,27 +1482,6 @@ def _path_importer_cache(cls, path): sys.path_importer_cache[path] = finder return finder - @classmethod - def _legacy_get_spec(cls, fullname, finder): - # This would be a good place for a DeprecationWarning if - # we ended up going that route. - if hasattr(finder, 'find_loader'): - msg = (f"{_bootstrap._object_name(finder)}.find_spec() not found; " - "falling back to find_loader()") - _warnings.warn(msg, ImportWarning) - loader, portions = finder.find_loader(fullname) - else: - msg = (f"{_bootstrap._object_name(finder)}.find_spec() not found; " - "falling back to find_module()") - _warnings.warn(msg, ImportWarning) - loader = finder.find_module(fullname) - portions = [] - if loader is not None: - return _bootstrap.spec_from_loader(fullname, loader) - spec = _bootstrap.ModuleSpec(fullname, None) - spec.submodule_search_locations = portions - return spec - @classmethod def _get_spec(cls, fullname, path, target=None): """Find the loader or namespace_path for this module/package name.""" @@ -1549,10 +1493,7 @@ def _get_spec(cls, fullname, path, target=None): continue finder = cls._path_importer_cache(entry) if finder is not None: - if hasattr(finder, 'find_spec'): - spec = finder.find_spec(fullname, target) - else: - spec = cls._legacy_get_spec(fullname, finder) + spec = finder.find_spec(fullname, target) if spec is None: continue if spec.loader is not None: @@ -1594,22 +1535,6 @@ def find_spec(cls, fullname, path=None, target=None): else: return spec - @classmethod - def find_module(cls, fullname, path=None): - """find the module on sys.path or 'path' based on sys.path_hooks and - sys.path_importer_cache. - - This method is deprecated. Use find_spec() instead. - - """ - _warnings.warn("PathFinder.find_module() is deprecated and " - "slated for removal in Python 3.12; use find_spec() instead", - DeprecationWarning) - spec = cls.find_spec(fullname, path) - if spec is None: - return None - return spec.loader - @staticmethod def find_distributions(*args, **kwargs): """ @@ -1654,23 +1579,6 @@ def invalidate_caches(self): """Invalidate the directory mtime.""" self._path_mtime = -1 - find_module = _find_module_shim - - def find_loader(self, fullname): - """Try to find a loader for the specified module, or the namespace - package portions. Returns (loader, list-of-portions). - - This method is deprecated. Use find_spec() instead. 
- - """ - _warnings.warn("FileFinder.find_loader() is deprecated and " - "slated for removal in Python 3.12; use find_spec() instead", - DeprecationWarning) - spec = self.find_spec(fullname) - if spec is None: - return None, [] - return spec.loader, spec.submodule_search_locations or [] - def _get_spec(self, loader_class, fullname, path, smsl, target): loader = loader_class(fullname, path) return spec_from_file_location(fullname, path, loader=loader, diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index 8fa9a0f3bc1e4b..b56fa94eb9c135 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -19,7 +19,7 @@ __all__ = [ - 'Loader', 'Finder', 'MetaPathFinder', 'PathEntryFinder', + 'Loader', 'MetaPathFinder', 'PathEntryFinder', 'ResourceLoader', 'InspectLoader', 'ExecutionLoader', 'FileLoader', 'SourceLoader', ] @@ -49,38 +49,6 @@ def _register(abstract_cls, *classes): abstract_cls.register(frozen_cls) -class Finder(metaclass=abc.ABCMeta): - - """Legacy abstract base class for import finders. - - It may be subclassed for compatibility with legacy third party - reimplementations of the import system. Otherwise, finder - implementations should derive from the more specific MetaPathFinder - or PathEntryFinder ABCs. - - Deprecated since Python 3.3 - """ - - def __init__(self): - warnings.warn("the Finder ABC is deprecated and " - "slated for removal in Python 3.12; use MetaPathFinder " - "or PathEntryFinder instead", - DeprecationWarning) - - @abc.abstractmethod - def find_module(self, fullname, path=None): - """An abstract method that should find a module. - The fullname is a str and the optional path is a str or None. - Returns a Loader object or None. - """ - warnings.warn("importlib.abc.Finder along with its find_module() " - "method are deprecated and " - "slated for removal in Python 3.12; use " - "MetaPathFinder.find_spec() or " - "PathEntryFinder.find_spec() instead", - DeprecationWarning) - - class MetaPathFinder(metaclass=abc.ABCMeta): """Abstract base class for import finders on sys.meta_path.""" @@ -88,27 +56,6 @@ class MetaPathFinder(metaclass=abc.ABCMeta): # We don't define find_spec() here since that would break # hasattr checks we do to support backward compatibility. - def find_module(self, fullname, path): - """Return a loader for the module. - - If no module is found, return None. The fullname is a str and - the path is a list of strings or None. - - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() exists then backwards-compatible - functionality is provided for this method. - - """ - warnings.warn("MetaPathFinder.find_module() is deprecated since Python " - "3.4 in favor of MetaPathFinder.find_spec() and is " - "slated for removal in Python 3.12", - DeprecationWarning, - stacklevel=2) - if not hasattr(self, 'find_spec'): - return None - found = self.find_spec(fullname, path) - return found.loader if found is not None else None - def invalidate_caches(self): """An optional method for clearing the finder's cache, if any. This method is used by importlib.invalidate_caches(). @@ -122,43 +69,6 @@ class PathEntryFinder(metaclass=abc.ABCMeta): """Abstract base class for path entry finders used by PathFinder.""" - # We don't define find_spec() here since that would break - # hasattr checks we do to support backward compatibility. - - def find_loader(self, fullname): - """Return (loader, namespace portion) for the path entry. - - The fullname is a str. 
The namespace portion is a sequence of - path entries contributing to part of a namespace package. The - sequence may be empty. If loader is not None, the portion will - be ignored. - - The portion will be discarded if another path entry finder - locates the module as a normal module or package. - - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() is provided than backwards-compatible - functionality is provided. - """ - warnings.warn("PathEntryFinder.find_loader() is deprecated since Python " - "3.4 in favor of PathEntryFinder.find_spec() " - "(available since 3.4)", - DeprecationWarning, - stacklevel=2) - if not hasattr(self, 'find_spec'): - return None, [] - found = self.find_spec(fullname) - if found is not None: - if not found.submodule_search_locations: - portions = [] - else: - portions = found.submodule_search_locations - return found.loader, portions - else: - return None, [] - - find_module = _bootstrap_external._find_module_shim - def invalidate_caches(self): """An optional method for clearing the finder's cache, if any. This method is used by PathFinder.invalidate_caches(). diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py index 5294578cc26cf3..b1d9271f8e47ca 100644 --- a/Lib/importlib/util.py +++ b/Lib/importlib/util.py @@ -112,6 +112,43 @@ def find_spec(name, package=None): return spec +# Normally we would use contextlib.contextmanager. However, this module +# is imported by runpy, which means we want to avoid any unnecessary +# dependencies. Thus we use a class. + +class allowing_all_extensions: + """A context manager that lets users skip the compatibility check. + + Normally, extensions that do not support multiple interpreters + may not be imported in a subinterpreter. That implies modules + that do not implement multi-phase init. + + Likewise for modules import in a subinterpeter with its own GIL + when the extension does not support a per-interpreter GIL. This + implies the module does not have a Py_mod_multiple_interpreters slot + set to Py_MOD_PER_INTERPRETER_GIL_SUPPORTED. + + In both cases, this context manager may be used to temporarily + disable the check for compatible extension modules. 
+ """ + + def __init__(self, disable_check=True): + self.disable_check = disable_check + + def __enter__(self): + self.old = _imp._override_multi_interp_extensions_check(self.override) + return self + + def __exit__(self, *args): + old = self.old + del self.old + _imp._override_multi_interp_extensions_check(old) + + @property + def override(self): + return -1 if self.disable_check else 1 + + class _LazyModule(types.ModuleType): """A subclass of the module type which triggers loading upon attribute access.""" diff --git a/Lib/inspect.py b/Lib/inspect.py index 6d1d7b766cb3bb..a64e85e4fd67a4 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -43,6 +43,7 @@ "Attribute", "BlockFinder", "BoundArguments", + "BufferFlags", "CORO_CLOSED", "CORO_CREATED", "CORO_RUNNING", @@ -1766,7 +1767,9 @@ def stack(context=1): def trace(context=1): """Return a list of records for the stack below the current exception.""" - return getinnerframes(sys.exc_info()[2], context) + exc = sys.exception() + tb = None if exc is None else exc.__traceback__ + return getinnerframes(tb, context) # ------------------------------------------------ static version of getattr @@ -1791,8 +1794,9 @@ def _check_class(klass, attr): return entry.__dict__[attr] return _sentinel -def _shadowed_dict(klass): - for entry in _static_getmro(klass): +@functools.lru_cache() +def _shadowed_dict_from_mro_tuple(mro): + for entry in mro: dunder_dict = _get_dunder_dict_of_class(entry) if '__dict__' in dunder_dict: class_dict = dunder_dict['__dict__'] @@ -1802,6 +1806,9 @@ def _shadowed_dict(klass): return class_dict return _sentinel +def _shadowed_dict(klass): + return _shadowed_dict_from_mro_tuple(_static_getmro(klass)) + def getattr_static(obj, attr, default=_sentinel): """Retrieve attributes without triggering dynamic lookup via the descriptor protocol, __getattr__ or __getattribute__. 
@@ -3310,6 +3317,28 @@ def signature(obj, *, follow_wrapped=True, globals=None, locals=None, eval_str=F globals=globals, locals=locals, eval_str=eval_str) +class BufferFlags(enum.IntFlag): + SIMPLE = 0x0 + WRITABLE = 0x1 + FORMAT = 0x4 + ND = 0x8 + STRIDES = 0x10 | ND + C_CONTIGUOUS = 0x20 | STRIDES + F_CONTIGUOUS = 0x40 | STRIDES + ANY_CONTIGUOUS = 0x80 | STRIDES + INDIRECT = 0x100 | STRIDES + CONTIG = ND | WRITABLE + CONTIG_RO = ND + STRIDED = STRIDES | WRITABLE + STRIDED_RO = STRIDES + RECORDS = STRIDES | WRITABLE | FORMAT + RECORDS_RO = STRIDES | FORMAT + FULL = INDIRECT | WRITABLE | FORMAT + FULL_RO = INDIRECT | FORMAT + READ = 0x100 + WRITE = 0x200 + + def _main(): """ Logic for inspecting an object given at command line """ import argparse diff --git a/Lib/opcode.py b/Lib/opcode.py index c93abdfbb68690..3a439f4ecb0063 100644 --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -198,6 +198,8 @@ def pseudo_op(name, op, real_ops): jrel_op('JUMP_BACKWARD', 140) # Number of words to skip (backwards) name_op('LOAD_SUPER_ATTR', 141) def_op('CALL_FUNCTION_EX', 142) # Flags +def_op('LOAD_FAST_AND_CLEAR', 143) # Local variable number +haslocal.append(143) def_op('EXTENDED_ARG', 144) EXTENDED_ARG = 144 @@ -269,6 +271,8 @@ def pseudo_op(name, op, real_ops): pseudo_op('LOAD_ZERO_SUPER_METHOD', 264, ['LOAD_SUPER_ATTR']) pseudo_op('LOAD_ZERO_SUPER_ATTR', 265, ['LOAD_SUPER_ATTR']) +pseudo_op('STORE_FAST_MAYBE_NULL', 266, ['STORE_FAST']) + MAX_PSEUDO_OPCODE = MIN_PSEUDO_OPCODE + len(_pseudo_ops) - 1 del def_op, name_op, jrel_op, jabs_op, pseudo_op @@ -307,6 +311,21 @@ def pseudo_op(name, op, real_ops): ("NB_INPLACE_XOR", "^="), ] +_intrinsic_1_descs = [ + "INTRINSIC_1_INVALID", + "INTRINSIC_PRINT", + "INTRINSIC_IMPORT_STAR", + "INTRINSIC_STOPITERATION_ERROR", + "INTRINSIC_ASYNC_GEN_WRAP", + "INTRINSIC_UNARY_POSITIVE", + "INTRINSIC_LIST_TO_TUPLE", +] + +_intrinsic_2_descs = [ + 'INTRINSIC_2_INVALID', + 'INTRINSIC_PREP_RERAISE_STAR', + ] + _specializations = { "BINARY_OP": [ "BINARY_OP_ADD_FLOAT", diff --git a/Lib/pathlib.py b/Lib/pathlib.py index f43f01ef41a97f..20ec1ce9d80374 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -54,40 +54,46 @@ def _ignore_error(exception): getattr(exception, 'winerror', None) in _IGNORED_WINERRORS) -def _is_wildcard_pattern(pat): - # Whether this pattern needs actual matching using fnmatch, or can - # be looked up directly as a file. - return "*" in pat or "?" 
in pat or "[" in pat +def _is_case_sensitive(flavour): + return flavour.normcase('Aa') == 'Aa' # # Globbing helpers # @functools.lru_cache() -def _make_selector(pattern_parts, flavour): +def _make_selector(pattern_parts, flavour, case_sensitive): pat = pattern_parts[0] - child_parts = pattern_parts[1:] if not pat: return _TerminatingSelector() if pat == '**': - cls = _RecursiveWildcardSelector - elif '**' in pat: - raise ValueError("Invalid pattern: '**' can only be an entire path component") - elif _is_wildcard_pattern(pat): - cls = _WildcardSelector + child_parts_idx = 1 + while child_parts_idx < len(pattern_parts) and pattern_parts[child_parts_idx] == '**': + child_parts_idx += 1 + child_parts = pattern_parts[child_parts_idx:] + if '**' in child_parts: + cls = _DoubleRecursiveWildcardSelector + else: + cls = _RecursiveWildcardSelector else: - cls = _PreciseSelector - return cls(pat, child_parts, flavour) + child_parts = pattern_parts[1:] + if pat == '..': + cls = _ParentSelector + elif '**' in pat: + raise ValueError("Invalid pattern: '**' can only be an entire path component") + else: + cls = _WildcardSelector + return cls(pat, child_parts, flavour, case_sensitive) class _Selector: """A selector matches a specific glob pattern part against the children of a given path.""" - def __init__(self, child_parts, flavour): + def __init__(self, child_parts, flavour, case_sensitive): self.child_parts = child_parts if child_parts: - self.successor = _make_selector(child_parts, flavour) + self.successor = _make_selector(child_parts, flavour, case_sensitive) self.dironly = True else: self.successor = _TerminatingSelector() @@ -97,44 +103,40 @@ def select_from(self, parent_path): """Iterate over all child paths of `parent_path` matched by this selector. This can contain parent_path itself.""" path_cls = type(parent_path) - is_dir = path_cls.is_dir - exists = path_cls.exists scandir = path_cls._scandir - normcase = path_cls._flavour.normcase - if not is_dir(parent_path): + if not parent_path.is_dir(): return iter([]) - return self._select_from(parent_path, is_dir, exists, scandir, normcase) + return self._select_from(parent_path, scandir) class _TerminatingSelector: - def _select_from(self, parent_path, is_dir, exists, scandir, normcase): + def _select_from(self, parent_path, scandir): yield parent_path -class _PreciseSelector(_Selector): +class _ParentSelector(_Selector): - def __init__(self, name, child_parts, flavour): - self.name = name - _Selector.__init__(self, child_parts, flavour) + def __init__(self, name, child_parts, flavour, case_sensitive): + _Selector.__init__(self, child_parts, flavour, case_sensitive) - def _select_from(self, parent_path, is_dir, exists, scandir, normcase): - try: - path = parent_path._make_child_relpath(self.name) - if (is_dir if self.dironly else exists)(path): - for p in self.successor._select_from(path, is_dir, exists, scandir, normcase): - yield p - except PermissionError: - return + def _select_from(self, parent_path, scandir): + path = parent_path._make_child_relpath('..') + for p in self.successor._select_from(path, scandir): + yield p class _WildcardSelector(_Selector): - def __init__(self, pat, child_parts, flavour): - self.match = re.compile(fnmatch.translate(flavour.normcase(pat))).fullmatch - _Selector.__init__(self, child_parts, flavour) + def __init__(self, pat, child_parts, flavour, case_sensitive): + _Selector.__init__(self, child_parts, flavour, case_sensitive) + if case_sensitive is None: + # TODO: evaluate case-sensitivity of each directory in 
_select_from() + case_sensitive = _is_case_sensitive(flavour) + flags = re.NOFLAG if case_sensitive else re.IGNORECASE + self.match = re.compile(fnmatch.translate(pat), flags=flags).fullmatch - def _select_from(self, parent_path, is_dir, exists, scandir, normcase): + def _select_from(self, parent_path, scandir): try: # We must close the scandir() object before proceeding to # avoid exhausting file descriptors when globbing deep trees. @@ -153,9 +155,9 @@ def _select_from(self, parent_path, is_dir, exists, scandir, normcase): raise continue name = entry.name - if self.match(normcase(name)): + if self.match(name): path = parent_path._make_child_relpath(name) - for p in self.successor._select_from(path, is_dir, exists, scandir, normcase): + for p in self.successor._select_from(path, scandir): yield p except PermissionError: return @@ -163,10 +165,10 @@ def _select_from(self, parent_path, is_dir, exists, scandir, normcase): class _RecursiveWildcardSelector(_Selector): - def __init__(self, pat, child_parts, flavour): - _Selector.__init__(self, child_parts, flavour) + def __init__(self, pat, child_parts, flavour, case_sensitive): + _Selector.__init__(self, child_parts, flavour, case_sensitive) - def _iterate_directories(self, parent_path, is_dir, scandir): + def _iterate_directories(self, parent_path, scandir): yield parent_path try: # We must close the scandir() object before proceeding to @@ -182,27 +184,39 @@ def _iterate_directories(self, parent_path, is_dir, scandir): raise if entry_is_dir and not entry.is_symlink(): path = parent_path._make_child_relpath(entry.name) - for p in self._iterate_directories(path, is_dir, scandir): + for p in self._iterate_directories(path, scandir): yield p except PermissionError: return - def _select_from(self, parent_path, is_dir, exists, scandir, normcase): + def _select_from(self, parent_path, scandir): try: - yielded = set() - try: - successor_select = self.successor._select_from - for starting_point in self._iterate_directories(parent_path, is_dir, scandir): - for p in successor_select(starting_point, is_dir, exists, scandir, normcase): - if p not in yielded: - yield p - yielded.add(p) - finally: - yielded.clear() + successor_select = self.successor._select_from + for starting_point in self._iterate_directories(parent_path, scandir): + for p in successor_select(starting_point, scandir): + yield p except PermissionError: return +class _DoubleRecursiveWildcardSelector(_RecursiveWildcardSelector): + """ + Like _RecursiveWildcardSelector, but also de-duplicates results from + successive selectors. This is necessary if the pattern contains + multiple non-adjacent '**' segments. + """ + + def _select_from(self, parent_path, scandir): + yielded = set() + try: + for p in super()._select_from(parent_path, scandir): + if p not in yielded: + yield p + yielded.add(p) + finally: + yielded.clear() + + # # Public API # @@ -210,11 +224,10 @@ def _select_from(self, parent_path, is_dir, exists, scandir, normcase): class _PathParents(Sequence): """This object provides sequence-like access to the logical ancestors of a path. 
Don't try to construct it yourself.""" - __slots__ = ('_pathcls', '_drv', '_root', '_tail') + __slots__ = ('_path', '_drv', '_root', '_tail') def __init__(self, path): - # We don't store the instance to avoid reference cycles - self._pathcls = type(path) + self._path = path self._drv = path.drive self._root = path.root self._tail = path._tail @@ -230,11 +243,11 @@ def __getitem__(self, idx): raise IndexError(idx) if idx < 0: idx += len(self) - return self._pathcls._from_parsed_parts(self._drv, self._root, - self._tail[:-idx - 1]) + return self._path._from_parsed_parts(self._drv, self._root, + self._tail[:-idx - 1]) def __repr__(self): - return "<{}.parents>".format(self._pathcls.__name__) + return "<{}.parents>".format(type(self._path).__name__) class PurePath(object): @@ -300,18 +313,34 @@ def __reduce__(self): return (self.__class__, self.parts) def __init__(self, *args): - if not args: - path = '' - elif len(args) == 1: - path = os.fspath(args[0]) + paths = [] + for arg in args: + if isinstance(arg, PurePath): + path = arg._raw_path + else: + try: + path = os.fspath(arg) + except TypeError: + path = arg + if not isinstance(path, str): + raise TypeError( + "argument should be a str or an os.PathLike " + "object where __fspath__ returns a str, " + f"not {type(path).__name__!r}") + paths.append(path) + if len(paths) == 0: + self._raw_path = '' + elif len(paths) == 1: + self._raw_path = paths[0] else: - path = self._flavour.join(*args) - if not isinstance(path, str): - raise TypeError( - "argument should be a str or an os.PathLike " - "object where __fspath__ returns a str, " - f"not {type(path).__name__!r}") - self._raw_path = path + self._raw_path = self._flavour.join(*paths) + + def with_segments(self, *pathsegments): + """Construct a new path object from any number of path-like objects. + Subclasses may override this method to customize how new path objects + are created from methods like `iterdir()`. + """ + return type(self)(*pathsegments) @classmethod def _parse_path(cls, path): @@ -339,15 +368,14 @@ def _load_parts(self): self._root = root self._tail_cached = tail - @classmethod - def _from_parsed_parts(cls, drv, root, tail): - path = cls._format_parsed_parts(drv, root, tail) - self = cls(path) - self._str = path or '.' - self._drv = drv - self._root = root - self._tail_cached = tail - return self + def _from_parsed_parts(self, drv, root, tail): + path_str = self._format_parsed_parts(drv, root, tail) + path = self.with_segments(path_str) + path._str = path_str or '.' + path._drv = drv + path._root = root + path._tail_cached = tail + return path @classmethod def _format_parsed_parts(cls, drv, root, tail): @@ -581,8 +609,7 @@ def relative_to(self, other, /, *_deprecated, walk_up=False): "scheduled for removal in Python {remove}") warnings._deprecated("pathlib.PurePath.relative_to(*args)", msg, remove=(3, 14)) - path_cls = type(self) - other = path_cls(other, *_deprecated) + other = self.with_segments(other, *_deprecated) for step, path in enumerate([other] + list(other.parents)): if self.is_relative_to(path): break @@ -591,7 +618,7 @@ def relative_to(self, other, /, *_deprecated, walk_up=False): if step and not walk_up: raise ValueError(f"{str(self)!r} is not in the subpath of {str(other)!r}") parts = ['..'] * step + self._tail[len(path._tail):] - return path_cls(*parts) + return self.with_segments(*parts) def is_relative_to(self, other, /, *_deprecated): """Return True if the path is relative to another path or False. 
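A brief sketch of why joinpath(), relative_to() and friends now build their
results through with_segments(): a subclass can override it so derived paths
keep subclass-specific state (TaggedPath and its "tag" attribute are
hypothetical, assuming the pathlib from this change):

from pathlib import PurePosixPath

class TaggedPath(PurePosixPath):
    tag = None

    def with_segments(self, *pathsegments):
        path = super().with_segments(*pathsegments)
        path.tag = self.tag        # propagate custom state to derived paths
        return path

p = TaggedPath('/srv/data')
p.tag = 'archive'
child = p.joinpath('reports', '2023')
assert child.tag == 'archive'      # joinpath() built the result via with_segments()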
@@ -602,7 +629,7 @@ def is_relative_to(self, other, /, *_deprecated): "scheduled for removal in Python {remove}") warnings._deprecated("pathlib.PurePath.is_relative_to(*args)", msg, remove=(3, 14)) - other = type(self)(other, *_deprecated) + other = self.with_segments(other, *_deprecated) return other == self or other in self.parents @property @@ -614,13 +641,13 @@ def parts(self): else: return tuple(self._tail) - def joinpath(self, *args): + def joinpath(self, *pathsegments): """Combine this path with one or several arguments, and return a new path representing either a subpath (if all arguments are relative paths) or a totally different path (if one of the arguments is anchored). """ - return self.__class__(self._raw_path, *args) + return self.with_segments(self, *pathsegments) def __truediv__(self, key): try: @@ -630,7 +657,7 @@ def __truediv__(self, key): def __rtruediv__(self, key): try: - return type(self)(key, self._raw_path) + return self.with_segments(key, self) except TypeError: return NotImplemented @@ -647,6 +674,8 @@ def parent(self): @property def parents(self): """A sequence of this path's logical parents.""" + # The value of this property should not be cached on the path object, + # as doing so would introduce a reference cycle. return _PathParents(self) def is_absolute(self): @@ -655,7 +684,7 @@ def is_absolute(self): # ntpath.isabs() is defective - see GH-44626 . if self._flavour is ntpath: return bool(self.drive and self.root) - return self._flavour.isabs(self) + return self._flavour.isabs(self._raw_path) def is_reserved(self): """Return True if the path contains one of the special names reserved @@ -677,7 +706,7 @@ def match(self, path_pattern): """ Return True if this path matches the given pattern. """ - pat = type(self)(path_pattern) + pat = self.with_segments(path_pattern) if not pat.parts: raise ValueError("empty pattern") pat_parts = pat._parts_normcase @@ -731,211 +760,191 @@ class Path(PurePath): """ __slots__ = () - def __init__(self, *args, **kwargs): - if kwargs: - msg = ("support for supplying keyword arguments to pathlib.PurePath " - "is deprecated and scheduled for removal in Python {remove}") - warnings._deprecated("pathlib.PurePath(**kwargs)", msg, remove=(3, 14)) - super().__init__(*args) - - def __new__(cls, *args, **kwargs): - if cls is Path: - cls = WindowsPath if os.name == 'nt' else PosixPath - return object.__new__(cls) - - def _make_child_relpath(self, name): - path_str = str(self) - tail = self._tail - if tail: - path_str = f'{path_str}{self._flavour.sep}{name}' - elif path_str != '.': - path_str = f'{path_str}{name}' - else: - path_str = name - path = type(self)(path_str) - path._str = path_str - path._drv = self.drive - path._root = self.root - path._tail_cached = tail + [name] - return path - - def __enter__(self): - # In previous versions of pathlib, __exit__() marked this path as - # closed; subsequent attempts to perform I/O would raise an IOError. - # This functionality was never documented, and had the effect of - # making Path objects mutable, contrary to PEP 428. - # In Python 3.9 __exit__() was made a no-op. - # In Python 3.11 __enter__() began emitting DeprecationWarning. - # In Python 3.13 __enter__() and __exit__() should be removed. 
- warnings.warn("pathlib.Path.__enter__() is deprecated and scheduled " - "for removal in Python 3.13; Path objects as a context " - "manager is a no-op", - DeprecationWarning, stacklevel=2) - return self - - def __exit__(self, t, v, tb): - pass - - # Public API - - @classmethod - def cwd(cls): - """Return a new path pointing to the current working directory.""" - # We call 'absolute()' rather than using 'os.getcwd()' directly to - # enable users to replace the implementation of 'absolute()' in a - # subclass and benefit from the new behaviour here. This works because - # os.path.abspath('.') == os.getcwd(). - return cls().absolute() - - @classmethod - def home(cls): - """Return a new path pointing to the user's home directory (as - returned by os.path.expanduser('~')). + def stat(self, *, follow_symlinks=True): """ - return cls("~").expanduser() - - def samefile(self, other_path): - """Return whether other_path is the same or not as this file - (as returned by os.path.samefile()). + Return the result of the stat() system call on this path, like + os.stat() does. """ - st = self.stat() - try: - other_st = other_path.stat() - except AttributeError: - other_st = self.__class__(other_path).stat() - return self._flavour.samestat(st, other_st) - - def iterdir(self): - """Yield path objects of the directory contents. + return os.stat(self, follow_symlinks=follow_symlinks) - The children are yielded in arbitrary order, and the - special entries '.' and '..' are not included. + def lstat(self): """ - for name in os.listdir(self): - yield self._make_child_relpath(name) - - def _scandir(self): - # bpo-24132: a future version of pathlib will support subclassing of - # pathlib.Path to customize how the filesystem is accessed. This - # includes scandir(), which is used to implement glob(). - return os.scandir(self) - - def glob(self, pattern): - """Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. + Like stat(), except if the path points to a symlink, the symlink's + status information is returned, rather than its target's. """ - sys.audit("pathlib.Path.glob", self, pattern) - if not pattern: - raise ValueError("Unacceptable pattern: {!r}".format(pattern)) - drv, root, pattern_parts = self._parse_path(pattern) - if drv or root: - raise NotImplementedError("Non-relative patterns are unsupported") - if pattern[-1] in (self._flavour.sep, self._flavour.altsep): - pattern_parts.append('') - selector = _make_selector(tuple(pattern_parts), self._flavour) - for p in selector.select_from(self): - yield p + return self.stat(follow_symlinks=False) - def rglob(self, pattern): - """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ - sys.audit("pathlib.Path.rglob", self, pattern) - drv, root, pattern_parts = self._parse_path(pattern) - if drv or root: - raise NotImplementedError("Non-relative patterns are unsupported") - if pattern and pattern[-1] in (self._flavour.sep, self._flavour.altsep): - pattern_parts.append('') - selector = _make_selector(("**",) + tuple(pattern_parts), self._flavour) - for p in selector.select_from(self): - yield p - def absolute(self): - """Return an absolute version of this path by prepending the current - working directory. No normalization or symlink resolution is performed. + # Convenience functions for querying the stat results - Use resolve() to get the canonical path to a file. 
+ def exists(self, *, follow_symlinks=True): """ - if self.is_absolute(): - return self - elif self.drive: - # There is a CWD on each drive-letter drive. - cwd = self._flavour.abspath(self.drive) - else: - cwd = os.getcwd() - return type(self)(cwd, self._raw_path) + Whether this path exists. - def resolve(self, strict=False): - """ - Make the path absolute, resolving all symlinks on the way and also - normalizing it. + This method normally follows symlinks; to check whether a symlink exists, + add the argument follow_symlinks=False. """ - - def check_eloop(e): - winerror = getattr(e, 'winerror', 0) - if e.errno == ELOOP or winerror == _WINERROR_CANT_RESOLVE_FILENAME: - raise RuntimeError("Symlink loop from %r" % e.filename) - try: - s = self._flavour.realpath(self, strict=strict) + self.stat(follow_symlinks=follow_symlinks) except OSError as e: - check_eloop(e) - raise - p = type(self)(s) - - # In non-strict mode, realpath() doesn't raise on symlink loops. - # Ensure we get an exception by calling stat() - if not strict: - try: - p.stat() - except OSError as e: - check_eloop(e) - return p + if not _ignore_error(e): + raise + return False + except ValueError: + # Non-encodable path + return False + return True - def stat(self, *, follow_symlinks=True): + def is_dir(self): """ - Return the result of the stat() system call on this path, like - os.stat() does. + Whether this path is a directory. """ - return os.stat(self, follow_symlinks=follow_symlinks) + try: + return S_ISDIR(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) + return False + except ValueError: + # Non-encodable path + return False - def owner(self): + def is_file(self): """ - Return the login name of the file owner. + Whether this path is a regular file (also True for symlinks pointing + to regular files). """ try: - import pwd - return pwd.getpwuid(self.stat().st_uid).pw_name - except ImportError: - raise NotImplementedError("Path.owner() is unsupported on this system") + return S_ISREG(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) + return False + except ValueError: + # Non-encodable path + return False - def group(self): + def is_mount(self): """ - Return the group name of the file gid. + Check if this path is a mount point """ + return self._flavour.ismount(self) + def is_symlink(self): + """ + Whether this path is a symbolic link. + """ try: - import grp - return grp.getgrgid(self.stat().st_gid).gr_name - except ImportError: - raise NotImplementedError("Path.group() is unsupported on this system") + return S_ISLNK(self.lstat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist + return False + except ValueError: + # Non-encodable path + return False - def open(self, mode='r', buffering=-1, encoding=None, - errors=None, newline=None): + def is_junction(self): """ - Open the file pointed by this path and return a file object, as - the built-in open() function does. + Whether this path is a junction. 
""" - if "b" not in mode: - encoding = io.text_encoding(encoding) - return io.open(self, mode, buffering, encoding, errors, newline) + return self._flavour.isjunction(self) - def read_bytes(self): + def is_block_device(self): """ - Open the file in bytes mode, read it, and close the file. + Whether this path is a block device. """ - with self.open(mode='rb') as f: + try: + return S_ISBLK(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) + return False + except ValueError: + # Non-encodable path + return False + + def is_char_device(self): + """ + Whether this path is a character device. + """ + try: + return S_ISCHR(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) + return False + except ValueError: + # Non-encodable path + return False + + def is_fifo(self): + """ + Whether this path is a FIFO. + """ + try: + return S_ISFIFO(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) + return False + except ValueError: + # Non-encodable path + return False + + def is_socket(self): + """ + Whether this path is a socket. + """ + try: + return S_ISSOCK(self.stat().st_mode) + except OSError as e: + if not _ignore_error(e): + raise + # Path doesn't exist or is a broken symlink + # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) + return False + except ValueError: + # Non-encodable path + return False + + def samefile(self, other_path): + """Return whether other_path is the same or not as this file + (as returned by os.path.samefile()). + """ + st = self.stat() + try: + other_st = other_path.stat() + except AttributeError: + other_st = self.with_segments(other_path).stat() + return self._flavour.samestat(st, other_st) + + def open(self, mode='r', buffering=-1, encoding=None, + errors=None, newline=None): + """ + Open the file pointed by this path and return a file object, as + the built-in open() function does. + """ + if "b" not in mode: + encoding = io.text_encoding(encoding) + return io.open(self, mode, buffering, encoding, errors, newline) + + def read_bytes(self): + """ + Open the file in bytes mode, read it, and close the file. + """ + with self.open(mode='rb') as f: return f.read() def read_text(self, encoding=None, errors=None): @@ -966,13 +975,239 @@ def write_text(self, data, encoding=None, errors=None, newline=None): with self.open(mode='w', encoding=encoding, errors=errors, newline=newline) as f: return f.write(data) + def iterdir(self): + """Yield path objects of the directory contents. + + The children are yielded in arbitrary order, and the + special entries '.' and '..' are not included. + """ + for name in os.listdir(self): + yield self._make_child_relpath(name) + + def _scandir(self): + # bpo-24132: a future version of pathlib will support subclassing of + # pathlib.Path to customize how the filesystem is accessed. This + # includes scandir(), which is used to implement glob(). 
+ return os.scandir(self) + + def _make_child_relpath(self, name): + path_str = str(self) + tail = self._tail + if tail: + path_str = f'{path_str}{self._flavour.sep}{name}' + elif path_str != '.': + path_str = f'{path_str}{name}' + else: + path_str = name + path = self.with_segments(path_str) + path._str = path_str + path._drv = self.drive + path._root = self.root + path._tail_cached = tail + [name] + return path + + def glob(self, pattern, *, case_sensitive=None): + """Iterate over this subtree and yield all existing files (of any + kind, including directories) matching the given relative pattern. + """ + sys.audit("pathlib.Path.glob", self, pattern) + if not pattern: + raise ValueError("Unacceptable pattern: {!r}".format(pattern)) + drv, root, pattern_parts = self._parse_path(pattern) + if drv or root: + raise NotImplementedError("Non-relative patterns are unsupported") + if pattern[-1] in (self._flavour.sep, self._flavour.altsep): + pattern_parts.append('') + selector = _make_selector(tuple(pattern_parts), self._flavour, case_sensitive) + for p in selector.select_from(self): + yield p + + def rglob(self, pattern, *, case_sensitive=None): + """Recursively yield all existing files (of any kind, including + directories) matching the given relative pattern, anywhere in + this subtree. + """ + sys.audit("pathlib.Path.rglob", self, pattern) + drv, root, pattern_parts = self._parse_path(pattern) + if drv or root: + raise NotImplementedError("Non-relative patterns are unsupported") + if pattern and pattern[-1] in (self._flavour.sep, self._flavour.altsep): + pattern_parts.append('') + selector = _make_selector(("**",) + tuple(pattern_parts), self._flavour, case_sensitive) + for p in selector.select_from(self): + yield p + + def walk(self, top_down=True, on_error=None, follow_symlinks=False): + """Walk the directory tree from this directory, similar to os.walk().""" + sys.audit("pathlib.Path.walk", self, on_error, follow_symlinks) + paths = [self] + + while paths: + path = paths.pop() + if isinstance(path, tuple): + yield path + continue + + # We may not have read permission for self, in which case we can't + # get a list of the files the directory contains. os.walk() + # always suppressed the exception in that instance, rather than + # blow up for a minor reason when (say) a thousand readable + # directories are still left to visit. That logic is copied here. + try: + scandir_it = path._scandir() + except OSError as error: + if on_error is not None: + on_error(error) + continue + + with scandir_it: + dirnames = [] + filenames = [] + for entry in scandir_it: + try: + is_dir = entry.is_dir(follow_symlinks=follow_symlinks) + except OSError: + # Carried over from os.path.isdir(). 
+ is_dir = False + + if is_dir: + dirnames.append(entry.name) + else: + filenames.append(entry.name) + + if top_down: + yield path, dirnames, filenames + else: + paths.append((path, dirnames, filenames)) + + paths += [path._make_child_relpath(d) for d in reversed(dirnames)] + + def __init__(self, *args, **kwargs): + if kwargs: + msg = ("support for supplying keyword arguments to pathlib.PurePath " + "is deprecated and scheduled for removal in Python {remove}") + warnings._deprecated("pathlib.PurePath(**kwargs)", msg, remove=(3, 14)) + super().__init__(*args) + + def __new__(cls, *args, **kwargs): + if cls is Path: + cls = WindowsPath if os.name == 'nt' else PosixPath + return object.__new__(cls) + + def __enter__(self): + # In previous versions of pathlib, __exit__() marked this path as + # closed; subsequent attempts to perform I/O would raise an IOError. + # This functionality was never documented, and had the effect of + # making Path objects mutable, contrary to PEP 428. + # In Python 3.9 __exit__() was made a no-op. + # In Python 3.11 __enter__() began emitting DeprecationWarning. + # In Python 3.13 __enter__() and __exit__() should be removed. + warnings.warn("pathlib.Path.__enter__() is deprecated and scheduled " + "for removal in Python 3.13; Path objects as a context " + "manager is a no-op", + DeprecationWarning, stacklevel=2) + return self + + def __exit__(self, t, v, tb): + pass + + # Public API + + @classmethod + def cwd(cls): + """Return a new path pointing to the current working directory.""" + # We call 'absolute()' rather than using 'os.getcwd()' directly to + # enable users to replace the implementation of 'absolute()' in a + # subclass and benefit from the new behaviour here. This works because + # os.path.abspath('.') == os.getcwd(). + return cls().absolute() + + @classmethod + def home(cls): + """Return a new path pointing to the user's home directory (as + returned by os.path.expanduser('~')). + """ + return cls("~").expanduser() + + def absolute(self): + """Return an absolute version of this path by prepending the current + working directory. No normalization or symlink resolution is performed. + + Use resolve() to get the canonical path to a file. + """ + if self.is_absolute(): + return self + elif self.drive: + # There is a CWD on each drive-letter drive. + cwd = self._flavour.abspath(self.drive) + else: + cwd = os.getcwd() + # Fast path for "empty" paths, e.g. Path("."), Path("") or Path(). + # We pass only one argument to with_segments() to avoid the cost + # of joining, and we exploit the fact that getcwd() returns a + # fully-normalized string by storing it in _str. This is used to + # implement Path.cwd(). + if not self.root and not self._tail: + result = self.with_segments(cwd) + result._str = cwd + return result + return self.with_segments(cwd, self) + + def resolve(self, strict=False): + """ + Make the path absolute, resolving all symlinks on the way and also + normalizing it. + """ + + def check_eloop(e): + winerror = getattr(e, 'winerror', 0) + if e.errno == ELOOP or winerror == _WINERROR_CANT_RESOLVE_FILENAME: + raise RuntimeError("Symlink loop from %r" % e.filename) + + try: + s = self._flavour.realpath(self, strict=strict) + except OSError as e: + check_eloop(e) + raise + p = self.with_segments(s) + + # In non-strict mode, realpath() doesn't raise on symlink loops. 
+ # Ensure we get an exception by calling stat() + if not strict: + try: + p.stat() + except OSError as e: + check_eloop(e) + return p + + def owner(self): + """ + Return the login name of the file owner. + """ + try: + import pwd + return pwd.getpwuid(self.stat().st_uid).pw_name + except ImportError: + raise NotImplementedError("Path.owner() is unsupported on this system") + + def group(self): + """ + Return the group name of the file gid. + """ + + try: + import grp + return grp.getgrgid(self.stat().st_gid).gr_name + except ImportError: + raise NotImplementedError("Path.group() is unsupported on this system") + def readlink(self): """ Return the path to which the symbolic link points. """ if not hasattr(os, "readlink"): raise NotImplementedError("os.readlink() not available on this system") - return type(self)(os.readlink(self)) + return self.with_segments(os.readlink(self)) def touch(self, mode=0o666, exist_ok=True): """ @@ -1043,13 +1278,6 @@ def rmdir(self): """ os.rmdir(self) - def lstat(self): - """ - Like stat(), except if the path points to a symlink, the symlink's - status information is returned, rather than its target's. - """ - return self.stat(follow_symlinks=False) - def rename(self, target): """ Rename this path to the target path. @@ -1061,7 +1289,7 @@ def rename(self, target): Returns the new Path instance pointing to the target path. """ os.rename(self, target) - return self.__class__(target) + return self.with_segments(target) def replace(self, target): """ @@ -1074,7 +1302,7 @@ def replace(self, target): Returns the new Path instance pointing to the target path. """ os.replace(self, target) - return self.__class__(target) + return self.with_segments(target) def symlink_to(self, target, target_is_directory=False): """ @@ -1095,148 +1323,6 @@ def hardlink_to(self, target): raise NotImplementedError("os.link() not available on this system") os.link(target, self) - - # Convenience functions for querying the stat results - - def exists(self): - """ - Whether this path exists. - """ - try: - self.stat() - except OSError as e: - if not _ignore_error(e): - raise - return False - except ValueError: - # Non-encodable path - return False - return True - - def is_dir(self): - """ - Whether this path is a directory. - """ - try: - return S_ISDIR(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) - return False - except ValueError: - # Non-encodable path - return False - - def is_file(self): - """ - Whether this path is a regular file (also True for symlinks pointing - to regular files). - """ - try: - return S_ISREG(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) - return False - except ValueError: - # Non-encodable path - return False - - def is_mount(self): - """ - Check if this path is a mount point - """ - return self._flavour.ismount(self) - - def is_symlink(self): - """ - Whether this path is a symbolic link. - """ - try: - return S_ISLNK(self.lstat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist - return False - except ValueError: - # Non-encodable path - return False - - def is_junction(self): - """ - Whether this path is a junction. 
- """ - return self._flavour.isjunction(self) - - def is_block_device(self): - """ - Whether this path is a block device. - """ - try: - return S_ISBLK(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) - return False - except ValueError: - # Non-encodable path - return False - - def is_char_device(self): - """ - Whether this path is a character device. - """ - try: - return S_ISCHR(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) - return False - except ValueError: - # Non-encodable path - return False - - def is_fifo(self): - """ - Whether this path is a FIFO. - """ - try: - return S_ISFIFO(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) - return False - except ValueError: - # Non-encodable path - return False - - def is_socket(self): - """ - Whether this path is a socket. - """ - try: - return S_ISSOCK(self.stat().st_mode) - except OSError as e: - if not _ignore_error(e): - raise - # Path doesn't exist or is a broken symlink - # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ ) - return False - except ValueError: - # Non-encodable path - return False - def expanduser(self): """ Return a new path with expanded ~ and ~user constructs (as returned by os.path.expanduser) @@ -1251,51 +1337,6 @@ def expanduser(self): return self - def walk(self, top_down=True, on_error=None, follow_symlinks=False): - """Walk the directory tree from this directory, similar to os.walk().""" - sys.audit("pathlib.Path.walk", self, on_error, follow_symlinks) - paths = [self] - - while paths: - path = paths.pop() - if isinstance(path, tuple): - yield path - continue - - # We may not have read permission for self, in which case we can't - # get a list of the files the directory contains. os.walk() - # always suppressed the exception in that instance, rather than - # blow up for a minor reason when (say) a thousand readable - # directories are still left to visit. That logic is copied here. - try: - scandir_it = path._scandir() - except OSError as error: - if on_error is not None: - on_error(error) - continue - - with scandir_it: - dirnames = [] - filenames = [] - for entry in scandir_it: - try: - is_dir = entry.is_dir(follow_symlinks=follow_symlinks) - except OSError: - # Carried over from os.path.isdir(). - is_dir = False - - if is_dir: - dirnames.append(entry.name) - else: - filenames.append(entry.name) - - if top_down: - yield path, dirnames, filenames - else: - paths.append((path, dirnames, filenames)) - - paths += [path._make_child_relpath(d) for d in reversed(dirnames)] - class PosixPath(Path, PurePosixPath): """Path subclass for non-Windows systems. 
diff --git a/Lib/pdb.py b/Lib/pdb.py index 645cbf518e58e3..b3dc5a455e56b9 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -270,6 +270,8 @@ def forget(self): self.lineno = None self.stack = [] self.curindex = 0 + if hasattr(self, 'curframe') and self.curframe: + self.curframe.f_globals.pop('__pdb_convenience_variables', None) self.curframe = None self.tb_lineno.clear() @@ -288,6 +290,7 @@ def setup(self, f, tb): # locals whenever the .f_locals accessor is called, so we # cache it here to ensure that modifications are not overwritten. self.curframe_locals = self.curframe.f_locals + self.set_convenience_variable(self.curframe, '_frame', self.curframe) return self.execRcLines() # Can be executed earlier than 'setup' if desired @@ -359,6 +362,7 @@ def user_return(self, frame, return_value): if self._wait_for_mainpyfile: return frame.f_locals['__return__'] = return_value + self.set_convenience_variable(frame, '_retval', return_value) self.message('--Return--') self.interaction(frame, None) @@ -369,6 +373,7 @@ def user_exception(self, frame, exc_info): return exc_type, exc_value, exc_traceback = exc_info frame.f_locals['__exception__'] = exc_type, exc_value + self.set_convenience_variable(frame, '_exception', exc_value) # An 'Internal StopIteration' exception is an exception debug event # issued by the interpreter when handling a subgenerator run with @@ -394,6 +399,7 @@ def _cmdloop(self): self.message('--KeyboardInterrupt--') # Called before loop, handles display expressions + # Set up convenience variable containers def preloop(self): displaying = self.displaying.get(self.curframe) if displaying: @@ -477,6 +483,9 @@ def precmd(self, line): next = line[marker+2:].lstrip() self.cmdqueue.append(next) line = line[:marker].rstrip() + + # Replace all the convenience variables + line = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'__pdb_convenience_variables["\1"]', line) return line def onecmd(self, line): @@ -527,6 +536,13 @@ def message(self, msg): def error(self, msg): print('***', msg, file=self.stdout) + # convenience variables + + def set_convenience_variable(self, frame, name, value): + if '__pdb_convenience_variables' not in frame.f_globals: + frame.f_globals['__pdb_convenience_variables'] = {} + frame.f_globals['__pdb_convenience_variables'][name] = value + # Generic completion functions. Individual complete_foo methods can be # assigned below to one of these functions. @@ -1018,6 +1034,7 @@ def _select_frame(self, number): self.curindex = number self.curframe = self.stack[self.curindex][0] self.curframe_locals = self.curframe.f_locals + self.set_convenience_variable(self.curframe, '_frame', self.curframe) self.print_stack_entry(self.stack[self.curindex]) self.lineno = None diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py index fb977eaaa05767..dccbec52aa731e 100644 --- a/Lib/pkgutil.py +++ b/Lib/pkgutil.py @@ -23,20 +23,6 @@ ModuleInfo.__doc__ = 'A namedtuple with minimal info about a module.' -def _get_spec(finder, name): - """Return the finder-specific module spec.""" - # Works with legacy finders. 
- try: - find_spec = finder.find_spec - except AttributeError: - loader = finder.find_module(name) - if loader is None: - return None - return importlib.util.spec_from_loader(name, loader) - else: - return find_spec(name) - - def read_code(stream): # This helper is needed in order for the PEP 302 emulation to # correctly handle compiled files @@ -284,6 +270,10 @@ def get_loader(module_or_name): If the named module is not already imported, its containing package (if any) is imported, in order to establish the package __path__. """ + warnings._deprecated("pkgutil.get_loader", + f"{warnings._DEPRECATED_MSG}; " + "use importlib.util.find_spec() instead", + remove=(3, 14)) if module_or_name in sys.modules: module_or_name = sys.modules[module_or_name] if module_or_name is None: @@ -308,6 +298,10 @@ def find_loader(fullname): importlib.util.find_spec that converts most failures to ImportError and only returns the loader rather than the full spec """ + warnings._deprecated("pkgutil.find_loader", + f"{warnings._DEPRECATED_MSG}; " + "use importlib.util.find_spec() instead", + remove=(3, 14)) if fullname.startswith('.'): msg = "Relative module name {!r} not supported".format(fullname) raise ImportError(msg) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 1c3443fa8469f7..84e673a7f87f90 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -448,7 +448,7 @@ def safeimport(path, forceload=0, cache={}): # Prevent garbage collection. cache[key] = sys.modules[key] del sys.modules[key] - module = __import__(path) + module = importlib.import_module(path) except BaseException as err: # Did the error occur before or after the module was found? if path in sys.modules: @@ -463,9 +463,6 @@ def safeimport(path, forceload=0, cache={}): else: # Some other error occurred during the importing process. 
raise ErrorDuringImport(path, err) - for part in path.split('.')[1:]: - try: module = getattr(module, part) - except AttributeError: return None return module # ---------------------------------------------------- formatter base class @@ -2242,7 +2239,7 @@ def run(self, callback, key=None, completer=None, onerror=None): callback(None, modname, '') else: try: - spec = pkgutil._get_spec(importer, modname) + spec = importer.find_spec(modname) except SyntaxError: # raised by tests for bad coding cookies or BOM continue diff --git a/Lib/test/clinic.test b/Lib/test/clinic.test index 53e5df5ba872ed..564205274edd73 100644 --- a/Lib/test/clinic.test +++ b/Lib/test/clinic.test @@ -4102,3 +4102,172 @@ exit: static PyObject * test_paramname_module_impl(PyObject *module, PyObject *mod) /*[clinic end generated code: output=4a2a849ecbcc8b53 input=afefe259667f13ba]*/ + +/*[clinic input] +mangle1 + + args: object + kwnames: object + return_value: object + _keywords: object + _parser: object + argsbuf: object + fastargs: object + nargs: object + noptargs: object + +[clinic start generated code]*/ + +PyDoc_STRVAR(mangle1__doc__, +"mangle1($module, /, args, kwnames, return_value, _keywords, _parser,\n" +" argsbuf, fastargs, nargs, noptargs)\n" +"--\n" +"\n"); + +#define MANGLE1_METHODDEF \ + {"mangle1", _PyCFunction_CAST(mangle1), METH_FASTCALL|METH_KEYWORDS, mangle1__doc__}, + +static PyObject * +mangle1_impl(PyObject *module, PyObject *args, PyObject *kwnames, + PyObject *return_value, PyObject *_keywords, PyObject *_parser, + PyObject *argsbuf, PyObject *fastargs, PyObject *nargs, + PyObject *noptargs); + +static PyObject * +mangle1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 9 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(args), &_Py_ID(kwnames), &_Py_ID(return_value), &_Py_ID(_keywords), &_Py_ID(_parser), &_Py_ID(argsbuf), &_Py_ID(fastargs), &_Py_ID(nargs), &_Py_ID(noptargs), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"args", "kwnames", "return_value", "_keywords", "_parser", "argsbuf", "fastargs", "nargs", "noptargs", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "mangle1", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[9]; + PyObject *__clinic_args; + PyObject *__clinic_kwnames; + PyObject *__clinic_return_value; + PyObject *__clinic__keywords; + PyObject *__clinic__parser; + PyObject *__clinic_argsbuf; + PyObject *__clinic_fastargs; + PyObject *__clinic_nargs; + PyObject *__clinic_noptargs; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 9, 9, 0, argsbuf); + if (!args) { + goto exit; + } + __clinic_args = args[0]; + __clinic_kwnames = args[1]; + __clinic_return_value = args[2]; + __clinic__keywords = args[3]; + __clinic__parser = args[4]; + __clinic_argsbuf = args[5]; + __clinic_fastargs = args[6]; + __clinic_nargs = args[7]; + __clinic_noptargs = args[8]; + return_value = mangle1_impl(module, __clinic_args, __clinic_kwnames, __clinic_return_value, __clinic__keywords, __clinic__parser, __clinic_argsbuf, __clinic_fastargs, __clinic_nargs, __clinic_noptargs); + 
+exit: + return return_value; +} + +static PyObject * +mangle1_impl(PyObject *module, PyObject *args, PyObject *kwnames, + PyObject *return_value, PyObject *_keywords, PyObject *_parser, + PyObject *argsbuf, PyObject *fastargs, PyObject *nargs, + PyObject *noptargs) +/*[clinic end generated code: output=083e5076be9987c3 input=a3ed51bdedf8a3c7]*/ + +/*[clinic input] +mangle2 + + args: object + kwargs: object + return_value: object + +[clinic start generated code]*/ + +PyDoc_STRVAR(mangle2__doc__, +"mangle2($module, /, args, kwargs, return_value)\n" +"--\n" +"\n"); + +#define MANGLE2_METHODDEF \ + {"mangle2", _PyCFunction_CAST(mangle2), METH_FASTCALL|METH_KEYWORDS, mangle2__doc__}, + +static PyObject * +mangle2_impl(PyObject *module, PyObject *args, PyObject *kwargs, + PyObject *return_value); + +static PyObject * +mangle2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(args), &_Py_ID(kwargs), &_Py_ID(return_value), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"args", "kwargs", "return_value", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "mangle2", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *__clinic_args; + PyObject *__clinic_kwargs; + PyObject *__clinic_return_value; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 0, argsbuf); + if (!args) { + goto exit; + } + __clinic_args = args[0]; + __clinic_kwargs = args[1]; + __clinic_return_value = args[2]; + return_value = mangle2_impl(module, __clinic_args, __clinic_kwargs, __clinic_return_value); + +exit: + return return_value; +} + +static PyObject * +mangle2_impl(PyObject *module, PyObject *args, PyObject *kwargs, + PyObject *return_value) +/*[clinic end generated code: output=2ebb62aaefe7590a input=391766fee51bad7a]*/ diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index c5eb6e7f1643ee..55e061950ff280 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -2,18 +2,19 @@ See https://www.zope.dev/Members/fdrake/DateTimeWiki/TestCases """ -import io -import itertools import bisect import copy import decimal -import sys +import io +import itertools import os import pickle import random import re import struct +import sys import unittest +import warnings from array import array @@ -39,6 +40,10 @@ # Needed by test_datetime import _strptime +try: + import _pydatetime +except ImportError: + pass # pickle_loads = {pickle.loads, pickle._loads} @@ -47,11 +52,12 @@ for proto in range(pickle.HIGHEST_PROTOCOL + 1)] assert len(pickle_choices) == pickle.HIGHEST_PROTOCOL + 1 +EPOCH_NAIVE = datetime(1970, 1, 1, 0, 0) # For calculating transitions + # An arbitrary collection of objects of non-datetime types, for testing # mixed-type comparisons. OTHERSTUFF = (10, 34.5, "abc", {}, [], ()) - # XXX Copied from test_float. 
INF = float("inf") NAN = float("nan") @@ -92,7 +98,7 @@ def test_divide_and_round(self): if '_Fast' in self.__class__.__name__: self.skipTest('Only run for Pure Python implementation') - dar = datetime_module._divide_and_round + dar = _pydatetime._divide_and_round self.assertEqual(dar(-10, -3), 3) self.assertEqual(dar(5, -2), -2) @@ -2622,9 +2628,10 @@ def test_utcfromtimestamp_limits(self): for test_name, ts in test_cases: with self.subTest(test_name, ts=ts): with self.assertRaises((ValueError, OverflowError)): - # converting a Python int to C time_t can raise a - # OverflowError, especially on 32-bit platforms. - self.theclass.utcfromtimestamp(ts) + with self.assertWarns(DeprecationWarning): + # converting a Python int to C time_t can raise a + # OverflowError, especially on 32-bit platforms. + self.theclass.utcfromtimestamp(ts) def test_insane_fromtimestamp(self): # It's possible that some platform maps time_t to double, @@ -2641,8 +2648,9 @@ def test_insane_utcfromtimestamp(self): # exempt such platforms (provided they return reasonable # results!). for insane in -1e200, 1e200: - self.assertRaises(OverflowError, self.theclass.utcfromtimestamp, - insane) + with self.assertWarns(DeprecationWarning): + self.assertRaises(OverflowError, self.theclass.utcfromtimestamp, + insane) @unittest.skipIf(sys.platform == "win32", "Windows doesn't accept negative timestamps") def test_negative_float_fromtimestamp(self): @@ -3001,7 +3009,7 @@ def __new__(cls, *args, **kwargs): for name, meth_name, kwargs in test_cases: with self.subTest(name): constr = getattr(DateTimeSubclass, meth_name) - if constr == "utcnow": + if meth_name == "utcnow": with self.assertWarns(DeprecationWarning): dt = constr(**kwargs) else: @@ -4729,8 +4737,10 @@ def test_tzinfo_utcfromtimestamp(self): # Try with and without naming the keyword; for whatever reason, # utcfromtimestamp() doesn't accept a tzinfo argument. off42 = FixedOffset(42, "42") - self.assertRaises(TypeError, meth, ts, off42) - self.assertRaises(TypeError, meth, ts, tzinfo=off42) + with warnings.catch_warnings(category=DeprecationWarning): + warnings.simplefilter("ignore", category=DeprecationWarning) + self.assertRaises(TypeError, meth, ts, off42) + self.assertRaises(TypeError, meth, ts, tzinfo=off42) def test_tzinfo_timetuple(self): # TestDateTime tested most of this. 
datetime adds a twist to the @@ -6098,15 +6108,14 @@ def stats(cls, start_year=1): def transitions(self): for (_, prev_ti), (t, ti) in pairs(zip(self.ut, self.ti)): shift = ti[0] - prev_ti[0] - # TODO: Remove this use of utcfromtimestamp - yield datetime.utcfromtimestamp(t), shift + yield (EPOCH_NAIVE + timedelta(seconds=t)), shift def nondst_folds(self): """Find all folds with the same value of isdst on both sides of the transition.""" for (_, prev_ti), (t, ti) in pairs(zip(self.ut, self.ti)): shift = ti[0] - prev_ti[0] if shift < ZERO and ti[1] == prev_ti[1]: - yield datetime.utcfromtimestamp(t), -shift, prev_ti[2], ti[2] + yield _utcfromtimestamp(datetime, t,), -shift, prev_ti[2], ti[2] @classmethod def print_all_nondst_folds(cls, same_abbr=False, start_year=1): diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index fb13fa0e243ba7..fd46819fd903fe 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -210,6 +210,13 @@ def clear_caches(): else: fractions._hash_algorithm.cache_clear() + try: + inspect = sys.modules['inspect'] + except KeyError: + pass + else: + inspect._shadowed_dict_from_mro_tuple.cache_clear() + def get_build_info(): # Get most important configure and build options as a list of strings. diff --git a/Lib/test/support/bytecode_helper.py b/Lib/test/support/bytecode_helper.py index 1d9b889c920986..7b577f54b8adc4 100644 --- a/Lib/test/support/bytecode_helper.py +++ b/Lib/test/support/bytecode_helper.py @@ -3,7 +3,7 @@ import unittest import dis import io -from _testinternalcapi import compiler_codegen, optimize_cfg +from _testinternalcapi import compiler_codegen, optimize_cfg, assemble_code_object _UNSPECIFIED = object() @@ -108,30 +108,36 @@ def normalize_insts(self, insts): res.append((opcode, arg, *loc)) return res + def complete_insts_info(self, insts): + # fill in omitted fields in location, and oparg 0 for ops with no arg. + res = [] + for item in insts: + assert isinstance(item, tuple) + inst = list(item) + opcode = dis.opmap[inst[0]] + oparg = inst[1] + loc = inst[2:] + [-1] * (6 - len(inst)) + res.append((opcode, oparg, *loc)) + return res + class CodegenTestCase(CompilationStepTestCase): def generate_code(self, ast): - insts = compiler_codegen(ast, "my_file.py", 0) + insts, _ = compiler_codegen(ast, "my_file.py", 0) return insts class CfgOptimizationTestCase(CompilationStepTestCase): - def complete_insts_info(self, insts): - # fill in omitted fields in location, and oparg 0 for ops with no arg. - res = [] - for item in insts: - assert isinstance(item, tuple) - inst = list(reversed(item)) - opcode = dis.opmap[inst.pop()] - oparg = inst.pop() - loc = inst + [-1] * (4 - len(inst)) - res.append((opcode, oparg, *loc)) - return res - def get_optimized(self, insts, consts): insts = self.normalize_insts(insts) insts = self.complete_insts_info(insts) insts = optimize_cfg(insts, consts) return insts, consts + +class AssemblerTestCase(CompilationStepTestCase): + + def get_code_object(self, filename, insts, metadata): + co = assemble_code_object(filename, insts, metadata) + return co diff --git a/Lib/test/support/import_helper.py b/Lib/test/support/import_helper.py index 772c0987c2ebef..67f18e530edc4b 100644 --- a/Lib/test/support/import_helper.py +++ b/Lib/test/support/import_helper.py @@ -115,6 +115,8 @@ def multi_interp_extensions_check(enabled=True): It overrides the PyInterpreterConfig.check_multi_interp_extensions setting (see support.run_in_subinterp_with_config() and _xxsubinterpreters.create()). 
+ + Also see importlib.utils.allowing_all_extensions(). """ old = _imp._override_multi_interp_extensions_check(1 if enabled else -1) try: diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index 861da2326d1214..0659d244d35686 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -1,5 +1,7 @@ # Author: Steven J. Bethard . +import contextlib +import functools import inspect import io import operator @@ -35,6 +37,35 @@ def getvalue(self): return self.buffer.raw.getvalue().decode('utf-8') +class StdStreamTest(unittest.TestCase): + + def test_skip_invalid_stderr(self): + parser = argparse.ArgumentParser() + with ( + contextlib.redirect_stderr(None), + mock.patch('argparse._sys.exit') + ): + parser.exit(status=0, message='foo') + + def test_skip_invalid_stdout(self): + parser = argparse.ArgumentParser() + for func in ( + parser.print_usage, + parser.print_help, + functools.partial(parser.parse_args, ['-h']) + ): + with ( + self.subTest(func=func), + contextlib.redirect_stdout(None), + # argparse uses stderr as a fallback + StdIOBuffer() as mocked_stderr, + contextlib.redirect_stderr(mocked_stderr), + mock.patch('argparse._sys.exit'), + ): + func() + self.assertRegex(mocked_stderr.getvalue(), r'usage:') + + class TestCase(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 8eef7baec70118..fdd21aca06ffdd 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -8,9 +8,11 @@ import unittest import warnings import weakref +from functools import partial from textwrap import dedent from test import support +from test.support.import_helper import import_fresh_module from test.support import os_helper, script_helper from test.support.ast_helper import ASTTestMixin @@ -267,6 +269,7 @@ def to_tuple(t): # excepthandler, arguments, keywords, alias class AST_Tests(unittest.TestCase): + maxDiff = None def _is_ast_node(self, name, node): if not isinstance(node, type): @@ -435,16 +438,42 @@ def test_base_classes(self): self.assertTrue(issubclass(ast.comprehension, ast.AST)) self.assertTrue(issubclass(ast.Gt, ast.AST)) + def test_import_deprecated(self): + ast = import_fresh_module('ast') + depr_regex = ( + r'ast\.{} is deprecated and will be removed in Python 3.14; ' + r'use ast\.Constant instead' + ) + for name in 'Num', 'Str', 'Bytes', 'NameConstant', 'Ellipsis': + with self.assertWarnsRegex(DeprecationWarning, depr_regex.format(name)): + getattr(ast, name) + + def test_field_attr_existence_deprecated(self): + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', '', DeprecationWarning) + from ast import Num, Str, Bytes, NameConstant, Ellipsis + + for name in ('Num', 'Str', 'Bytes', 'NameConstant', 'Ellipsis'): + item = getattr(ast, name) + if self._is_ast_node(name, item): + with self.subTest(item): + with self.assertWarns(DeprecationWarning): + x = item() + if isinstance(x, ast.AST): + self.assertIs(type(x._fields), tuple) + def test_field_attr_existence(self): for name, item in ast.__dict__.items(): + # These emit DeprecationWarnings + if name in {'Num', 'Str', 'Bytes', 'NameConstant', 'Ellipsis'}: + continue + # constructor has a different signature + if name == 'Index': + continue if self._is_ast_node(name, item): - if name == 'Index': - # Index(value) just returns value now. - # The argument is required. 
- continue x = item() if isinstance(x, ast.AST): - self.assertEqual(type(x._fields), tuple) + self.assertIs(type(x._fields), tuple) def test_arguments(self): x = ast.arguments() @@ -459,25 +488,108 @@ def test_arguments(self): self.assertEqual(x.args, 2) self.assertEqual(x.vararg, 3) + def test_field_attr_writable_deprecated(self): + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', '', DeprecationWarning) + x = ast.Num() + # We can assign to _fields + x._fields = 666 + self.assertEqual(x._fields, 666) + def test_field_attr_writable(self): - x = ast.Num() + x = ast.Constant() # We can assign to _fields x._fields = 666 self.assertEqual(x._fields, 666) + def test_classattrs_deprecated(self): + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', '', DeprecationWarning) + from ast import Num, Str, Bytes, NameConstant, Ellipsis + + with warnings.catch_warnings(record=True) as wlog: + warnings.filterwarnings('always', '', DeprecationWarning) + x = ast.Num() + self.assertEqual(x._fields, ('value', 'kind')) + + with self.assertRaises(AttributeError): + x.value + + with self.assertRaises(AttributeError): + x.n + + x = ast.Num(42) + self.assertEqual(x.value, 42) + self.assertEqual(x.n, 42) + + with self.assertRaises(AttributeError): + x.lineno + + with self.assertRaises(AttributeError): + x.foobar + + x = ast.Num(lineno=2) + self.assertEqual(x.lineno, 2) + + x = ast.Num(42, lineno=0) + self.assertEqual(x.lineno, 0) + self.assertEqual(x._fields, ('value', 'kind')) + self.assertEqual(x.value, 42) + self.assertEqual(x.n, 42) + + self.assertRaises(TypeError, ast.Num, 1, None, 2) + self.assertRaises(TypeError, ast.Num, 1, None, 2, lineno=0) + + # Arbitrary keyword arguments are supported + self.assertEqual(ast.Num(1, foo='bar').foo, 'bar') + + with self.assertRaisesRegex(TypeError, "Num got multiple values for argument 'n'"): + ast.Num(1, n=2) + + self.assertEqual(ast.Num(42).n, 42) + self.assertEqual(ast.Num(4.25).n, 4.25) + self.assertEqual(ast.Num(4.25j).n, 4.25j) + self.assertEqual(ast.Str('42').s, '42') + self.assertEqual(ast.Bytes(b'42').s, b'42') + self.assertIs(ast.NameConstant(True).value, True) + self.assertIs(ast.NameConstant(False).value, False) + self.assertIs(ast.NameConstant(None).value, None) + + self.assertEqual([str(w.message) for w in wlog], [ + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'ast.Num is 
deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute s is deprecated and will be removed in Python 3.14; use value instead', + 'ast.Bytes is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute s is deprecated and will be removed in Python 3.14; use value instead', + 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', + ]) + def test_classattrs(self): - x = ast.Num() + x = ast.Constant() self.assertEqual(x._fields, ('value', 'kind')) with self.assertRaises(AttributeError): x.value - with self.assertRaises(AttributeError): - x.n - - x = ast.Num(42) + x = ast.Constant(42) self.assertEqual(x.value, 42) - self.assertEqual(x.n, 42) with self.assertRaises(AttributeError): x.lineno @@ -485,36 +597,23 @@ def test_classattrs(self): with self.assertRaises(AttributeError): x.foobar - x = ast.Num(lineno=2) + x = ast.Constant(lineno=2) self.assertEqual(x.lineno, 2) - x = ast.Num(42, lineno=0) + x = ast.Constant(42, lineno=0) self.assertEqual(x.lineno, 0) self.assertEqual(x._fields, ('value', 'kind')) self.assertEqual(x.value, 42) - self.assertEqual(x.n, 42) - self.assertRaises(TypeError, ast.Num, 1, None, 2) - self.assertRaises(TypeError, ast.Num, 1, None, 2, lineno=0) + self.assertRaises(TypeError, ast.Constant, 1, None, 2) + self.assertRaises(TypeError, ast.Constant, 1, None, 2, lineno=0) # Arbitrary keyword arguments are supported self.assertEqual(ast.Constant(1, foo='bar').foo, 'bar') - self.assertEqual(ast.Num(1, foo='bar').foo, 'bar') - with self.assertRaisesRegex(TypeError, "Num got multiple values for argument 'n'"): - ast.Num(1, n=2) with self.assertRaisesRegex(TypeError, "Constant got multiple values for argument 'value'"): ast.Constant(1, value=2) - self.assertEqual(ast.Num(42).n, 42) - self.assertEqual(ast.Num(4.25).n, 4.25) - self.assertEqual(ast.Num(4.25j).n, 4.25j) - self.assertEqual(ast.Str('42').s, '42') - self.assertEqual(ast.Bytes(b'42').s, b'42') - self.assertIs(ast.NameConstant(True).value, True) - self.assertIs(ast.NameConstant(False).value, False) - self.assertIs(ast.NameConstant(None).value, None) - self.assertEqual(ast.Constant(42).value, 42) self.assertEqual(ast.Constant(4.25).value, 4.25) self.assertEqual(ast.Constant(4.25j).value, 4.25j) @@ -526,85 +625,211 @@ def test_classattrs(self): self.assertIs(ast.Constant(...).value, ...) 
def test_realtype(self): - self.assertEqual(type(ast.Num(42)), ast.Constant) - self.assertEqual(type(ast.Num(4.25)), ast.Constant) - self.assertEqual(type(ast.Num(4.25j)), ast.Constant) - self.assertEqual(type(ast.Str('42')), ast.Constant) - self.assertEqual(type(ast.Bytes(b'42')), ast.Constant) - self.assertEqual(type(ast.NameConstant(True)), ast.Constant) - self.assertEqual(type(ast.NameConstant(False)), ast.Constant) - self.assertEqual(type(ast.NameConstant(None)), ast.Constant) - self.assertEqual(type(ast.Ellipsis()), ast.Constant) + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', '', DeprecationWarning) + from ast import Num, Str, Bytes, NameConstant, Ellipsis + + with warnings.catch_warnings(record=True) as wlog: + warnings.filterwarnings('always', '', DeprecationWarning) + self.assertIs(type(ast.Num(42)), ast.Constant) + self.assertIs(type(ast.Num(4.25)), ast.Constant) + self.assertIs(type(ast.Num(4.25j)), ast.Constant) + self.assertIs(type(ast.Str('42')), ast.Constant) + self.assertIs(type(ast.Bytes(b'42')), ast.Constant) + self.assertIs(type(ast.NameConstant(True)), ast.Constant) + self.assertIs(type(ast.NameConstant(False)), ast.Constant) + self.assertIs(type(ast.NameConstant(None)), ast.Constant) + self.assertIs(type(ast.Ellipsis()), ast.Constant) + + self.assertEqual([str(w.message) for w in wlog], [ + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Bytes is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Ellipsis is deprecated and will be removed in Python 3.14; use ast.Constant instead', + ]) def test_isinstance(self): - self.assertTrue(isinstance(ast.Num(42), ast.Num)) - self.assertTrue(isinstance(ast.Num(4.2), ast.Num)) - self.assertTrue(isinstance(ast.Num(4.2j), ast.Num)) - self.assertTrue(isinstance(ast.Str('42'), ast.Str)) - self.assertTrue(isinstance(ast.Bytes(b'42'), ast.Bytes)) - self.assertTrue(isinstance(ast.NameConstant(True), ast.NameConstant)) - self.assertTrue(isinstance(ast.NameConstant(False), ast.NameConstant)) - self.assertTrue(isinstance(ast.NameConstant(None), ast.NameConstant)) - self.assertTrue(isinstance(ast.Ellipsis(), ast.Ellipsis)) - - self.assertTrue(isinstance(ast.Constant(42), ast.Num)) - self.assertTrue(isinstance(ast.Constant(4.2), ast.Num)) - self.assertTrue(isinstance(ast.Constant(4.2j), ast.Num)) - self.assertTrue(isinstance(ast.Constant('42'), ast.Str)) - self.assertTrue(isinstance(ast.Constant(b'42'), ast.Bytes)) - self.assertTrue(isinstance(ast.Constant(True), ast.NameConstant)) - self.assertTrue(isinstance(ast.Constant(False), ast.NameConstant)) - self.assertTrue(isinstance(ast.Constant(None), ast.NameConstant)) - self.assertTrue(isinstance(ast.Constant(...), ast.Ellipsis)) - - self.assertFalse(isinstance(ast.Str('42'), ast.Num)) - self.assertFalse(isinstance(ast.Num(42), ast.Str)) - self.assertFalse(isinstance(ast.Str('42'), ast.Bytes)) - self.assertFalse(isinstance(ast.Num(42), ast.NameConstant)) 
- self.assertFalse(isinstance(ast.Num(42), ast.Ellipsis)) - self.assertFalse(isinstance(ast.NameConstant(True), ast.Num)) - self.assertFalse(isinstance(ast.NameConstant(False), ast.Num)) - - self.assertFalse(isinstance(ast.Constant('42'), ast.Num)) - self.assertFalse(isinstance(ast.Constant(42), ast.Str)) - self.assertFalse(isinstance(ast.Constant('42'), ast.Bytes)) - self.assertFalse(isinstance(ast.Constant(42), ast.NameConstant)) - self.assertFalse(isinstance(ast.Constant(42), ast.Ellipsis)) - self.assertFalse(isinstance(ast.Constant(True), ast.Num)) - self.assertFalse(isinstance(ast.Constant(False), ast.Num)) - - self.assertFalse(isinstance(ast.Constant(), ast.Num)) - self.assertFalse(isinstance(ast.Constant(), ast.Str)) - self.assertFalse(isinstance(ast.Constant(), ast.Bytes)) - self.assertFalse(isinstance(ast.Constant(), ast.NameConstant)) - self.assertFalse(isinstance(ast.Constant(), ast.Ellipsis)) + from ast import Constant + + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', '', DeprecationWarning) + from ast import Num, Str, Bytes, NameConstant, Ellipsis + + cls_depr_msg = ( + 'ast.{} is deprecated and will be removed in Python 3.14; ' + 'use ast.Constant instead' + ) + + assertNumDeprecated = partial( + self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Num") + ) + assertStrDeprecated = partial( + self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Str") + ) + assertBytesDeprecated = partial( + self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Bytes") + ) + assertNameConstantDeprecated = partial( + self.assertWarnsRegex, + DeprecationWarning, + cls_depr_msg.format("NameConstant") + ) + assertEllipsisDeprecated = partial( + self.assertWarnsRegex, DeprecationWarning, cls_depr_msg.format("Ellipsis") + ) + + for arg in 42, 4.2, 4.2j: + with self.subTest(arg=arg): + with assertNumDeprecated(): + n = Num(arg) + with assertNumDeprecated(): + self.assertIsInstance(n, Num) + + with assertStrDeprecated(): + s = Str('42') + with assertStrDeprecated(): + self.assertIsInstance(s, Str) + + with assertBytesDeprecated(): + b = Bytes(b'42') + with assertBytesDeprecated(): + self.assertIsInstance(b, Bytes) + + for arg in True, False, None: + with self.subTest(arg=arg): + with assertNameConstantDeprecated(): + n = NameConstant(arg) + with assertNameConstantDeprecated(): + self.assertIsInstance(n, NameConstant) + + with assertEllipsisDeprecated(): + e = Ellipsis() + with assertEllipsisDeprecated(): + self.assertIsInstance(e, Ellipsis) + + for arg in 42, 4.2, 4.2j: + with self.subTest(arg=arg): + with assertNumDeprecated(): + self.assertIsInstance(Constant(arg), Num) + + with assertStrDeprecated(): + self.assertIsInstance(Constant('42'), Str) + + with assertBytesDeprecated(): + self.assertIsInstance(Constant(b'42'), Bytes) + + for arg in True, False, None: + with self.subTest(arg=arg): + with assertNameConstantDeprecated(): + self.assertIsInstance(Constant(arg), NameConstant) + + with assertEllipsisDeprecated(): + self.assertIsInstance(Constant(...), Ellipsis) + + with assertStrDeprecated(): + s = Str('42') + assertNumDeprecated(self.assertNotIsInstance, s, Num) + assertBytesDeprecated(self.assertNotIsInstance, s, Bytes) + + with assertNumDeprecated(): + n = Num(42) + assertStrDeprecated(self.assertNotIsInstance, n, Str) + assertNameConstantDeprecated(self.assertNotIsInstance, n, NameConstant) + assertEllipsisDeprecated(self.assertNotIsInstance, n, Ellipsis) + + with assertNameConstantDeprecated(): + n = NameConstant(True) + with 
assertNumDeprecated(): + self.assertNotIsInstance(n, Num) + + with assertNameConstantDeprecated(): + n = NameConstant(False) + with assertNumDeprecated(): + self.assertNotIsInstance(n, Num) + + for arg in '42', True, False: + with self.subTest(arg=arg): + with assertNumDeprecated(): + self.assertNotIsInstance(Constant(arg), Num) + + assertStrDeprecated(self.assertNotIsInstance, Constant(42), Str) + assertBytesDeprecated(self.assertNotIsInstance, Constant('42'), Bytes) + assertNameConstantDeprecated(self.assertNotIsInstance, Constant(42), NameConstant) + assertEllipsisDeprecated(self.assertNotIsInstance, Constant(42), Ellipsis) + assertNumDeprecated(self.assertNotIsInstance, Constant(), Num) + assertStrDeprecated(self.assertNotIsInstance, Constant(), Str) + assertBytesDeprecated(self.assertNotIsInstance, Constant(), Bytes) + assertNameConstantDeprecated(self.assertNotIsInstance, Constant(), NameConstant) + assertEllipsisDeprecated(self.assertNotIsInstance, Constant(), Ellipsis) class S(str): pass - self.assertTrue(isinstance(ast.Constant(S('42')), ast.Str)) - self.assertFalse(isinstance(ast.Constant(S('42')), ast.Num)) + with assertStrDeprecated(): + self.assertIsInstance(Constant(S('42')), Str) + with assertNumDeprecated(): + self.assertNotIsInstance(Constant(S('42')), Num) + + def test_constant_subclasses_deprecated(self): + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', '', DeprecationWarning) + from ast import Num - def test_subclasses(self): - class N(ast.Num): + with warnings.catch_warnings(record=True) as wlog: + warnings.filterwarnings('always', '', DeprecationWarning) + class N(ast.Num): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.z = 'spam' + class N2(ast.Num): + pass + + n = N(42) + self.assertEqual(n.n, 42) + self.assertEqual(n.z, 'spam') + self.assertIs(type(n), N) + self.assertIsInstance(n, N) + self.assertIsInstance(n, ast.Num) + self.assertNotIsInstance(n, N2) + self.assertNotIsInstance(ast.Num(42), N) + n = N(n=42) + self.assertEqual(n.n, 42) + self.assertIs(type(n), N) + + self.assertEqual([str(w.message) for w in wlog], [ + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', + ]) + + def test_constant_subclasses(self): + class N(ast.Constant): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.z = 'spam' - class N2(ast.Num): + class N2(ast.Constant): pass n = N(42) - self.assertEqual(n.n, 42) + self.assertEqual(n.value, 42) self.assertEqual(n.z, 'spam') self.assertEqual(type(n), N) self.assertTrue(isinstance(n, N)) - self.assertTrue(isinstance(n, ast.Num)) + self.assertTrue(isinstance(n, ast.Constant)) self.assertFalse(isinstance(n, N2)) - self.assertFalse(isinstance(ast.Num(42), N)) - n = N(n=42) - self.assertEqual(n.n, 42) + self.assertFalse(isinstance(ast.Constant(42), N)) + n = N(value=42) + self.assertEqual(n.value, 42) self.assertEqual(type(n), N) def test_module(self): - body = [ast.Num(42)] + body = [ast.Constant(42)] x = ast.Module(body, []) self.assertEqual(x.body, body) @@ -617,8 +842,8 @@ def test_nodeclasses(self): 
x.foobarbaz = 5 self.assertEqual(x.foobarbaz, 5) - n1 = ast.Num(1) - n3 = ast.Num(3) + n1 = ast.Constant(1) + n3 = ast.Constant(3) addop = ast.Add() x = ast.BinOp(n1, addop, n3) self.assertEqual(x.left, n1) @@ -987,7 +1212,7 @@ def test_dump_incomplete(self): def test_copy_location(self): src = ast.parse('1 + 1', mode='eval') - src.body.right = ast.copy_location(ast.Num(2), src.body.right) + src.body.right = ast.copy_location(ast.Constant(2), src.body.right) self.assertEqual(ast.dump(src, include_attributes=True), 'Expression(body=BinOp(left=Constant(value=1, lineno=1, col_offset=0, ' 'end_lineno=1, end_col_offset=1), op=Add(), right=Constant(value=2, ' @@ -1004,7 +1229,7 @@ def test_copy_location(self): def test_fix_missing_locations(self): src = ast.parse('write("spam")') src.body.append(ast.Expr(ast.Call(ast.Name('spam', ast.Load()), - [ast.Str('eggs')], []))) + [ast.Constant('eggs')], []))) self.assertEqual(src, ast.fix_missing_locations(src)) self.maxDiff = None self.assertEqual(ast.dump(src, include_attributes=True), @@ -1317,9 +1542,9 @@ def arguments(args=None, posonlyargs=None, vararg=None, check(arguments(args=args), "must have Load context") check(arguments(posonlyargs=args), "must have Load context") check(arguments(kwonlyargs=args), "must have Load context") - check(arguments(defaults=[ast.Num(3)]), + check(arguments(defaults=[ast.Constant(3)]), "more positional defaults than args") - check(arguments(kw_defaults=[ast.Num(4)]), + check(arguments(kw_defaults=[ast.Constant(4)]), "length of kwonlyargs is not the same as kw_defaults") args = [ast.arg("x", ast.Name("x", ast.Load()))] check(arguments(args=args, defaults=[ast.Name("x", ast.Store())]), @@ -1372,9 +1597,9 @@ def test_delete(self): "must have Del context") def test_assign(self): - self.stmt(ast.Assign([], ast.Num(3)), "empty targets on Assign") - self.stmt(ast.Assign([None], ast.Num(3)), "None disallowed") - self.stmt(ast.Assign([ast.Name("x", ast.Load())], ast.Num(3)), + self.stmt(ast.Assign([], ast.Constant(3)), "empty targets on Assign") + self.stmt(ast.Assign([None], ast.Constant(3)), "None disallowed") + self.stmt(ast.Assign([ast.Name("x", ast.Load())], ast.Constant(3)), "must have Store context") self.stmt(ast.Assign([ast.Name("x", ast.Store())], ast.Name("y", ast.Store())), @@ -1402,39 +1627,39 @@ def test_for(self): self.stmt(ast.For(x, y, [p], [e]), "must have Load context") def test_while(self): - self.stmt(ast.While(ast.Num(3), [], []), "empty body on While") + self.stmt(ast.While(ast.Constant(3), [], []), "empty body on While") self.stmt(ast.While(ast.Name("x", ast.Store()), [ast.Pass()], []), "must have Load context") - self.stmt(ast.While(ast.Num(3), [ast.Pass()], + self.stmt(ast.While(ast.Constant(3), [ast.Pass()], [ast.Expr(ast.Name("x", ast.Store()))]), "must have Load context") def test_if(self): - self.stmt(ast.If(ast.Num(3), [], []), "empty body on If") + self.stmt(ast.If(ast.Constant(3), [], []), "empty body on If") i = ast.If(ast.Name("x", ast.Store()), [ast.Pass()], []) self.stmt(i, "must have Load context") - i = ast.If(ast.Num(3), [ast.Expr(ast.Name("x", ast.Store()))], []) + i = ast.If(ast.Constant(3), [ast.Expr(ast.Name("x", ast.Store()))], []) self.stmt(i, "must have Load context") - i = ast.If(ast.Num(3), [ast.Pass()], + i = ast.If(ast.Constant(3), [ast.Pass()], [ast.Expr(ast.Name("x", ast.Store()))]) self.stmt(i, "must have Load context") def test_with(self): p = ast.Pass() self.stmt(ast.With([], [p]), "empty items on With") - i = ast.withitem(ast.Num(3), None) + i = 
ast.withitem(ast.Constant(3), None) self.stmt(ast.With([i], []), "empty body on With") i = ast.withitem(ast.Name("x", ast.Store()), None) self.stmt(ast.With([i], [p]), "must have Load context") - i = ast.withitem(ast.Num(3), ast.Name("x", ast.Load())) + i = ast.withitem(ast.Constant(3), ast.Name("x", ast.Load())) self.stmt(ast.With([i], [p]), "must have Store context") def test_raise(self): - r = ast.Raise(None, ast.Num(3)) + r = ast.Raise(None, ast.Constant(3)) self.stmt(r, "Raise with cause but no exception") r = ast.Raise(ast.Name("x", ast.Store()), None) self.stmt(r, "must have Load context") - r = ast.Raise(ast.Num(4), ast.Name("x", ast.Store())) + r = ast.Raise(ast.Constant(4), ast.Name("x", ast.Store())) self.stmt(r, "must have Load context") def test_try(self): @@ -1505,11 +1730,11 @@ def test_expr(self): def test_boolop(self): b = ast.BoolOp(ast.And(), []) self.expr(b, "less than 2 values") - b = ast.BoolOp(ast.And(), [ast.Num(3)]) + b = ast.BoolOp(ast.And(), [ast.Constant(3)]) self.expr(b, "less than 2 values") - b = ast.BoolOp(ast.And(), [ast.Num(4), None]) + b = ast.BoolOp(ast.And(), [ast.Constant(4), None]) self.expr(b, "None disallowed") - b = ast.BoolOp(ast.And(), [ast.Num(4), ast.Name("x", ast.Store())]) + b = ast.BoolOp(ast.And(), [ast.Constant(4), ast.Name("x", ast.Store())]) self.expr(b, "must have Load context") def test_unaryop(self): @@ -1597,11 +1822,11 @@ def test_compare(self): left = ast.Name("x", ast.Load()) comp = ast.Compare(left, [ast.In()], []) self.expr(comp, "no comparators") - comp = ast.Compare(left, [ast.In()], [ast.Num(4), ast.Num(5)]) + comp = ast.Compare(left, [ast.In()], [ast.Constant(4), ast.Constant(5)]) self.expr(comp, "different number of comparators and operands") - comp = ast.Compare(ast.Num("blah"), [ast.In()], [left]) + comp = ast.Compare(ast.Constant("blah"), [ast.In()], [left]) self.expr(comp) - comp = ast.Compare(left, [ast.In()], [ast.Num("blah")]) + comp = ast.Compare(left, [ast.In()], [ast.Constant("blah")]) self.expr(comp) def test_call(self): @@ -1617,23 +1842,37 @@ def test_call(self): self.expr(call, "must have Load context") def test_num(self): - class subint(int): - pass - class subfloat(float): - pass - class subcomplex(complex): - pass - for obj in "0", "hello": - self.expr(ast.Num(obj)) - for obj in subint(), subfloat(), subcomplex(): - self.expr(ast.Num(obj), "invalid type", exc=TypeError) + with warnings.catch_warnings(record=True) as wlog: + warnings.filterwarnings('ignore', '', DeprecationWarning) + from ast import Num + + with warnings.catch_warnings(record=True) as wlog: + warnings.filterwarnings('always', '', DeprecationWarning) + class subint(int): + pass + class subfloat(float): + pass + class subcomplex(complex): + pass + for obj in "0", "hello": + self.expr(ast.Num(obj)) + for obj in subint(), subfloat(), subcomplex(): + self.expr(ast.Num(obj), "invalid type", exc=TypeError) + + self.assertEqual([str(w.message) for w in wlog], [ + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + 'ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead', + ]) def test_attribute(self): attr = ast.Attribute(ast.Name("x", ast.Store()), "y", ast.Load()) self.expr(attr, "must have Load context") def 
test_subscript(self): - sub = ast.Subscript(ast.Name("x", ast.Store()), ast.Num(3), + sub = ast.Subscript(ast.Name("x", ast.Store()), ast.Constant(3), ast.Load()) self.expr(sub, "must have Load context") x = ast.Name("x", ast.Load()) @@ -1653,7 +1892,7 @@ def test_subscript(self): def test_starred(self): left = ast.List([ast.Starred(ast.Name("x", ast.Load()), ast.Store())], ast.Store()) - assign = ast.Assign([left], ast.Num(4)) + assign = ast.Assign([left], ast.Constant(4)) self.stmt(assign, "must have Store context") def _sequence(self, fac): @@ -1668,7 +1907,17 @@ def test_tuple(self): self._sequence(ast.Tuple) def test_nameconstant(self): - self.expr(ast.NameConstant(4)) + with warnings.catch_warnings(record=True) as wlog: + warnings.filterwarnings('ignore', '', DeprecationWarning) + from ast import NameConstant + + with warnings.catch_warnings(record=True) as wlog: + warnings.filterwarnings('always', '', DeprecationWarning) + self.expr(ast.NameConstant(4)) + + self.assertEqual([str(w.message) for w in wlog], [ + 'ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant instead', + ]) def test_stdlib_validates(self): stdlib = os.path.dirname(ast.__file__) @@ -2357,10 +2606,15 @@ def visit_Ellipsis(self, node): ]) self.assertEqual([str(w.message) for w in wlog], [ 'visit_Num is deprecated; add visit_Constant', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', 'visit_Num is deprecated; add visit_Constant', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', 'visit_Num is deprecated; add visit_Constant', + 'Attribute n is deprecated and will be removed in Python 3.14; use value instead', 'visit_Str is deprecated; add visit_Constant', + 'Attribute s is deprecated and will be removed in Python 3.14; use value instead', 'visit_Bytes is deprecated; add visit_Constant', + 'Attribute s is deprecated and will be removed in Python 3.14; use value instead', 'visit_NameConstant is deprecated; add visit_Constant', 'visit_NameConstant is deprecated; add visit_Constant', 'visit_Ellipsis is deprecated; add visit_Constant', diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py new file mode 100644 index 00000000000000..fe690934292a86 --- /dev/null +++ b/Lib/test/test_asyncio/test_eager_task_factory.py @@ -0,0 +1,344 @@ +"""Tests for base_events.py""" + +import asyncio +import contextvars +import gc +import time +import unittest + +from types import GenericAlias +from unittest import mock +from asyncio import base_events +from asyncio import tasks +from test.test_asyncio import utils as test_utils +from test.test_asyncio.test_tasks import get_innermost_context +from test import support + +MOCK_ANY = mock.ANY + + +def tearDownModule(): + asyncio.set_event_loop_policy(None) + + +class EagerTaskFactoryLoopTests: + + Task = None + + def run_coro(self, coro): + """ + Helper method to run the `coro` coroutine in the test event loop. + It helps with making sure the event loop is running before starting + to execute `coro`. This is important for testing the eager step + functionality, since an eager step is taken only if the event loop + is already running. 
+ """ + + async def coro_runner(): + self.assertTrue(asyncio.get_event_loop().is_running()) + return await coro + + return self.loop.run_until_complete(coro) + + def setUp(self): + super().setUp() + self.loop = asyncio.new_event_loop() + self.eager_task_factory = asyncio.create_eager_task_factory(self.Task) + self.loop.set_task_factory(self.eager_task_factory) + self.set_event_loop(self.loop) + + def test_eager_task_factory_set(self): + self.assertIsNotNone(self.eager_task_factory) + self.assertIs(self.loop.get_task_factory(), self.eager_task_factory) + + async def noop(): pass + + async def run(): + t = self.loop.create_task(noop()) + self.assertIsInstance(t, self.Task) + await t + + self.run_coro(run()) + + def test_await_future_during_eager_step(self): + + async def set_result(fut, val): + fut.set_result(val) + + async def run(): + fut = self.loop.create_future() + t = self.loop.create_task(set_result(fut, 'my message')) + # assert the eager step completed the task + self.assertTrue(t.done()) + return await fut + + self.assertEqual(self.run_coro(run()), 'my message') + + def test_eager_completion(self): + + async def coro(): + return 'hello' + + async def run(): + t = self.loop.create_task(coro()) + # assert the eager step completed the task + self.assertTrue(t.done()) + return await t + + self.assertEqual(self.run_coro(run()), 'hello') + + def test_block_after_eager_step(self): + + async def coro(): + await asyncio.sleep(0.1) + return 'finished after blocking' + + async def run(): + t = self.loop.create_task(coro()) + self.assertFalse(t.done()) + result = await t + self.assertTrue(t.done()) + return result + + self.assertEqual(self.run_coro(run()), 'finished after blocking') + + def test_cancellation_after_eager_completion(self): + + async def coro(): + return 'finished without blocking' + + async def run(): + t = self.loop.create_task(coro()) + t.cancel() + result = await t + # finished task can't be cancelled + self.assertFalse(t.cancelled()) + return result + + self.assertEqual(self.run_coro(run()), 'finished without blocking') + + def test_cancellation_after_eager_step_blocks(self): + + async def coro(): + await asyncio.sleep(0.1) + return 'finished after blocking' + + async def run(): + t = self.loop.create_task(coro()) + t.cancel('cancellation message') + self.assertGreater(t.cancelling(), 0) + result = await t + + with self.assertRaises(asyncio.CancelledError) as cm: + self.run_coro(run()) + + self.assertEqual('cancellation message', cm.exception.args[0]) + + def test_current_task(self): + captured_current_task = None + + async def coro(): + nonlocal captured_current_task + captured_current_task = asyncio.current_task() + # verify the task before and after blocking is identical + await asyncio.sleep(0.1) + self.assertIs(asyncio.current_task(), captured_current_task) + + async def run(): + t = self.loop.create_task(coro()) + self.assertIs(captured_current_task, t) + await t + + self.run_coro(run()) + captured_current_task = None + + def test_all_tasks_with_eager_completion(self): + captured_all_tasks = None + + async def coro(): + nonlocal captured_all_tasks + captured_all_tasks = asyncio.all_tasks() + + async def run(): + t = self.loop.create_task(coro()) + self.assertIn(t, captured_all_tasks) + self.assertNotIn(t, asyncio.all_tasks()) + + self.run_coro(run()) + + def test_all_tasks_with_blocking(self): + captured_eager_all_tasks = None + + async def coro(fut1, fut2): + nonlocal captured_eager_all_tasks + captured_eager_all_tasks = asyncio.all_tasks() + await fut1 + 
fut2.set_result(None) + + async def run(): + fut1 = self.loop.create_future() + fut2 = self.loop.create_future() + t = self.loop.create_task(coro(fut1, fut2)) + self.assertIn(t, captured_eager_all_tasks) + self.assertIn(t, asyncio.all_tasks()) + fut1.set_result(None) + await fut2 + self.assertNotIn(t, asyncio.all_tasks()) + + self.run_coro(run()) + + def test_context_vars(self): + cv = contextvars.ContextVar('cv', default=0) + + coro_first_step_ran = False + coro_second_step_ran = False + + async def coro(): + nonlocal coro_first_step_ran + nonlocal coro_second_step_ran + self.assertEqual(cv.get(), 1) + cv.set(2) + self.assertEqual(cv.get(), 2) + coro_first_step_ran = True + await asyncio.sleep(0.1) + self.assertEqual(cv.get(), 2) + cv.set(3) + self.assertEqual(cv.get(), 3) + coro_second_step_ran = True + + async def run(): + cv.set(1) + t = self.loop.create_task(coro()) + self.assertTrue(coro_first_step_ran) + self.assertFalse(coro_second_step_ran) + self.assertEqual(cv.get(), 1) + await t + self.assertTrue(coro_second_step_ran) + self.assertEqual(cv.get(), 1) + + self.run_coro(run()) + + +class PyEagerTaskFactoryLoopTests(EagerTaskFactoryLoopTests, test_utils.TestCase): + Task = tasks._PyTask + + +@unittest.skipUnless(hasattr(tasks, '_CTask'), + 'requires the C _asyncio module') +class CEagerTaskFactoryLoopTests(EagerTaskFactoryLoopTests, test_utils.TestCase): + Task = getattr(tasks, '_CTask', None) + + +class AsyncTaskCounter: + def __init__(self, loop, *, task_class, eager): + self.suspense_count = 0 + self.task_count = 0 + + def CountingTask(*args, eager_start=False, **kwargs): + if not eager_start: + self.task_count += 1 + kwargs["eager_start"] = eager_start + return task_class(*args, **kwargs) + + if eager: + factory = asyncio.create_eager_task_factory(CountingTask) + else: + def factory(loop, coro, **kwargs): + return CountingTask(coro, loop=loop, **kwargs) + loop.set_task_factory(factory) + + def get(self): + return self.task_count + + +async def awaitable_chain(depth): + if depth == 0: + return 0 + return 1 + await awaitable_chain(depth - 1) + + +async def recursive_taskgroups(width, depth): + if depth == 0: + return + + async with asyncio.TaskGroup() as tg: + futures = [ + tg.create_task(recursive_taskgroups(width, depth - 1)) + for _ in range(width) + ] + + +async def recursive_gather(width, depth): + if depth == 0: + return + + await asyncio.gather( + *[recursive_gather(width, depth - 1) for _ in range(width)] + ) + + +class BaseTaskCountingTests: + + Task = None + eager = None + expected_task_count = None + + def setUp(self): + super().setUp() + self.loop = asyncio.new_event_loop() + self.counter = AsyncTaskCounter(self.loop, task_class=self.Task, eager=self.eager) + self.set_event_loop(self.loop) + + def test_awaitables_chain(self): + observed_depth = self.loop.run_until_complete(awaitable_chain(100)) + self.assertEqual(observed_depth, 100) + self.assertEqual(self.counter.get(), 0 if self.eager else 1) + + def test_recursive_taskgroups(self): + num_tasks = self.loop.run_until_complete(recursive_taskgroups(5, 4)) + self.assertEqual(self.counter.get(), self.expected_task_count) + + def test_recursive_gather(self): + self.loop.run_until_complete(recursive_gather(5, 4)) + self.assertEqual(self.counter.get(), self.expected_task_count) + + +class BaseNonEagerTaskFactoryTests(BaseTaskCountingTests): + eager = False + expected_task_count = 781 # 1 + 5 + 5^2 + 5^3 + 5^4 + + +class BaseEagerTaskFactoryTests(BaseTaskCountingTests): + eager = True + expected_task_count = 0 + + +class 
NonEagerTests(BaseNonEagerTaskFactoryTests, test_utils.TestCase): + Task = asyncio.Task + + +class EagerTests(BaseEagerTaskFactoryTests, test_utils.TestCase): + Task = asyncio.Task + + +class NonEagerPyTaskTests(BaseNonEagerTaskFactoryTests, test_utils.TestCase): + Task = tasks._PyTask + + +class EagerPyTaskTests(BaseEagerTaskFactoryTests, test_utils.TestCase): + Task = tasks._PyTask + + +@unittest.skipUnless(hasattr(tasks, '_CTask'), + 'requires the C _asyncio module') +class NonEagerCTaskTests(BaseNonEagerTaskFactoryTests, test_utils.TestCase): + Task = getattr(tasks, '_CTask', None) + + +@unittest.skipUnless(hasattr(tasks, '_CTask'), + 'requires the C _asyncio module') +class EagerCTaskTests(BaseEagerTaskFactoryTests, test_utils.TestCase): + Task = getattr(tasks, '_CTask', None) + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_bool.py b/Lib/test/test_bool.py index 916e22a527a8e0..34ecb45f161dfe 100644 --- a/Lib/test/test_bool.py +++ b/Lib/test/test_bool.py @@ -58,8 +58,22 @@ def test_math(self): self.assertEqual(-True, -1) self.assertEqual(abs(True), 1) self.assertIsNot(abs(True), True) - self.assertEqual(~False, -1) - self.assertEqual(~True, -2) + with self.assertWarns(DeprecationWarning): + # We need to put the bool in a variable, because the constant + # ~False is evaluated at compile time due to constant folding; + # consequently the DeprecationWarning would be issued during + # module loading and not during test execution. + false = False + self.assertEqual(~false, -1) + with self.assertWarns(DeprecationWarning): + # also check that the warning is issued in case of constant + # folding at compile time + self.assertEqual(eval("~False"), -1) + with self.assertWarns(DeprecationWarning): + true = True + self.assertEqual(~true, -2) + with self.assertWarns(DeprecationWarning): + self.assertEqual(eval("~True"), -2) self.assertEqual(False+2, 2) self.assertEqual(True+2, 3) diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py index 098d2d999643cb..2c65ae8114818f 100644 --- a/Lib/test/test_buffer.py +++ b/Lib/test/test_buffer.py @@ -17,6 +17,7 @@ import unittest from test import support from test.support import os_helper +import inspect from itertools import permutations, product from random import randrange, sample, choice import warnings @@ -4438,5 +4439,316 @@ def test_pybuffer_size_from_format(self): struct.calcsize(format)) +class TestPythonBufferProtocol(unittest.TestCase): + def test_basic(self): + class MyBuffer: + def __buffer__(self, flags): + return memoryview(b"hello") + + mv = memoryview(MyBuffer()) + self.assertEqual(mv.tobytes(), b"hello") + self.assertEqual(bytes(MyBuffer()), b"hello") + + def test_bad_buffer_method(self): + class MustReturnMV: + def __buffer__(self, flags): + return 42 + + self.assertRaises(TypeError, memoryview, MustReturnMV()) + + class NoBytesEither: + def __buffer__(self, flags): + return b"hello" + + self.assertRaises(TypeError, memoryview, NoBytesEither()) + + class WrongArity: + def __buffer__(self): + return memoryview(b"hello") + + self.assertRaises(TypeError, memoryview, WrongArity()) + + def test_release_buffer(self): + class WhatToRelease: + def __init__(self): + self.held = False + self.ba = bytearray(b"hello") + + def __buffer__(self, flags): + if self.held: + raise TypeError("already held") + self.held = True + return memoryview(self.ba) + + def __release_buffer__(self, buffer): + self.held = False + + wr = WhatToRelease() + self.assertFalse(wr.held) + with memoryview(wr) as mv: + self.assertTrue(wr.held) + 
self.assertEqual(mv.tobytes(), b"hello") + self.assertFalse(wr.held) + + def test_same_buffer_returned(self): + class WhatToRelease: + def __init__(self): + self.held = False + self.ba = bytearray(b"hello") + self.created_mv = None + + def __buffer__(self, flags): + if self.held: + raise TypeError("already held") + self.held = True + self.created_mv = memoryview(self.ba) + return self.created_mv + + def __release_buffer__(self, buffer): + assert buffer is self.created_mv + self.held = False + + wr = WhatToRelease() + self.assertFalse(wr.held) + with memoryview(wr) as mv: + self.assertTrue(wr.held) + self.assertEqual(mv.tobytes(), b"hello") + self.assertFalse(wr.held) + + def test_buffer_flags(self): + class PossiblyMutable: + def __init__(self, data, mutable) -> None: + self._data = bytearray(data) + self._mutable = mutable + + def __buffer__(self, flags): + if flags & inspect.BufferFlags.WRITABLE: + if not self._mutable: + raise RuntimeError("not mutable") + return memoryview(self._data) + else: + return memoryview(bytes(self._data)) + + mutable = PossiblyMutable(b"hello", True) + immutable = PossiblyMutable(b"hello", False) + with memoryview._from_flags(mutable, inspect.BufferFlags.WRITABLE) as mv: + self.assertEqual(mv.tobytes(), b"hello") + mv[0] = ord(b'x') + self.assertEqual(mv.tobytes(), b"xello") + with memoryview._from_flags(mutable, inspect.BufferFlags.SIMPLE) as mv: + self.assertEqual(mv.tobytes(), b"xello") + with self.assertRaises(TypeError): + mv[0] = ord(b'h') + self.assertEqual(mv.tobytes(), b"xello") + with memoryview._from_flags(immutable, inspect.BufferFlags.SIMPLE) as mv: + self.assertEqual(mv.tobytes(), b"hello") + with self.assertRaises(TypeError): + mv[0] = ord(b'x') + self.assertEqual(mv.tobytes(), b"hello") + + with self.assertRaises(RuntimeError): + memoryview._from_flags(immutable, inspect.BufferFlags.WRITABLE) + with memoryview(immutable) as mv: + self.assertEqual(mv.tobytes(), b"hello") + with self.assertRaises(TypeError): + mv[0] = ord(b'x') + self.assertEqual(mv.tobytes(), b"hello") + + def test_call_builtins(self): + ba = bytearray(b"hello") + mv = ba.__buffer__(0) + self.assertEqual(mv.tobytes(), b"hello") + ba.__release_buffer__(mv) + with self.assertRaises(OverflowError): + ba.__buffer__(sys.maxsize + 1) + + @unittest.skipIf(_testcapi is None, "requires _testcapi") + def test_c_buffer(self): + buf = _testcapi.testBuf() + self.assertEqual(buf.references, 0) + mv = buf.__buffer__(0) + self.assertIsInstance(mv, memoryview) + self.assertEqual(mv.tobytes(), b"test") + self.assertEqual(buf.references, 1) + buf.__release_buffer__(mv) + self.assertEqual(buf.references, 0) + with self.assertRaises(ValueError): + mv.tobytes() + # Calling it again doesn't cause issues + with self.assertRaises(ValueError): + buf.__release_buffer__(mv) + self.assertEqual(buf.references, 0) + + def test_inheritance(self): + class A(bytearray): + def __buffer__(self, flags): + return super().__buffer__(flags) + + a = A(b"hello") + mv = memoryview(a) + self.assertEqual(mv.tobytes(), b"hello") + + def test_inheritance_releasebuffer(self): + rb_call_count = 0 + class B(bytearray): + def __buffer__(self, flags): + return super().__buffer__(flags) + def __release_buffer__(self, view): + nonlocal rb_call_count + rb_call_count += 1 + super().__release_buffer__(view) + + b = B(b"hello") + with memoryview(b) as mv: + self.assertEqual(mv.tobytes(), b"hello") + self.assertEqual(rb_call_count, 0) + self.assertEqual(rb_call_count, 1) + + def test_inherit_but_return_something_else(self): + class 
A(bytearray): + def __buffer__(self, flags): + return memoryview(b"hello") + + a = A(b"hello") + with memoryview(a) as mv: + self.assertEqual(mv.tobytes(), b"hello") + + rb_call_count = 0 + rb_raised = False + class B(bytearray): + def __buffer__(self, flags): + return memoryview(b"hello") + def __release_buffer__(self, view): + nonlocal rb_call_count + rb_call_count += 1 + try: + super().__release_buffer__(view) + except ValueError: + nonlocal rb_raised + rb_raised = True + + b = B(b"hello") + with memoryview(b) as mv: + self.assertEqual(mv.tobytes(), b"hello") + self.assertEqual(rb_call_count, 0) + self.assertEqual(rb_call_count, 1) + self.assertIs(rb_raised, True) + + def test_override_only_release(self): + class C(bytearray): + def __release_buffer__(self, buffer): + super().__release_buffer__(buffer) + + c = C(b"hello") + with memoryview(c) as mv: + self.assertEqual(mv.tobytes(), b"hello") + + def test_release_saves_reference(self): + smuggled_buffer = None + + class C(bytearray): + def __release_buffer__(s, buffer: memoryview): + with self.assertRaises(ValueError): + memoryview(buffer) + with self.assertRaises(ValueError): + buffer.cast("b") + with self.assertRaises(ValueError): + buffer.toreadonly() + with self.assertRaises(ValueError): + buffer[:1] + with self.assertRaises(ValueError): + buffer.__buffer__(0) + nonlocal smuggled_buffer + smuggled_buffer = buffer + self.assertEqual(buffer.tobytes(), b"hello") + super().__release_buffer__(buffer) + + c = C(b"hello") + with memoryview(c) as mv: + self.assertEqual(mv.tobytes(), b"hello") + c.clear() + with self.assertRaises(ValueError): + smuggled_buffer.tobytes() + + def test_release_saves_reference_no_subclassing(self): + ba = bytearray(b"hello") + + class C: + def __buffer__(self, flags): + return memoryview(ba) + + def __release_buffer__(self, buffer): + self.buffer = buffer + + c = C() + with memoryview(c) as mv: + self.assertEqual(mv.tobytes(), b"hello") + self.assertEqual(c.buffer.tobytes(), b"hello") + + with self.assertRaises(BufferError): + ba.clear() + c.buffer.release() + ba.clear() + + def test_multiple_inheritance_buffer_last(self): + class A: + def __buffer__(self, flags): + return memoryview(b"hello A") + + class B(A, bytearray): + def __buffer__(self, flags): + return super().__buffer__(flags) + + b = B(b"hello") + with memoryview(b) as mv: + self.assertEqual(mv.tobytes(), b"hello A") + + class Releaser: + def __release_buffer__(self, buffer): + self.buffer = buffer + + class C(Releaser, bytearray): + def __buffer__(self, flags): + return super().__buffer__(flags) + + c = C(b"hello C") + with memoryview(c) as mv: + self.assertEqual(mv.tobytes(), b"hello C") + c.clear() + with self.assertRaises(ValueError): + c.buffer.tobytes() + + def test_multiple_inheritance_buffer_last(self): + class A: + def __buffer__(self, flags): + raise RuntimeError("should not be called") + + def __release_buffer__(self, buffer): + raise RuntimeError("should not be called") + + class B(bytearray, A): + def __buffer__(self, flags): + return super().__buffer__(flags) + + b = B(b"hello") + with memoryview(b) as mv: + self.assertEqual(mv.tobytes(), b"hello") + + class Releaser: + buffer = None + def __release_buffer__(self, buffer): + self.buffer = buffer + + class C(bytearray, Releaser): + def __buffer__(self, flags): + return super().__buffer__(flags) + + c = C(b"hello") + with memoryview(c) as mv: + self.assertEqual(mv.tobytes(), b"hello") + c.clear() + self.assertIs(c.buffer, None) + + if __name__ == "__main__": unittest.main() diff --git 
a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 04dd8ff3070c99..821710a7fa3286 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -2372,24 +2372,31 @@ def __del__(self): @cpython_only class ImmortalTests(unittest.TestCase): - def test_immortal(self): - none_refcount = sys.getrefcount(None) - true_refcount = sys.getrefcount(True) - false_refcount = sys.getrefcount(False) - smallint_refcount = sys.getrefcount(100) - - # Assert that all of these immortal instances have large ref counts. - self.assertGreater(none_refcount, 2 ** 15) - self.assertGreater(true_refcount, 2 ** 15) - self.assertGreater(false_refcount, 2 ** 15) - self.assertGreater(smallint_refcount, 2 ** 15) - - # Confirm that the refcount doesn't change even with a new ref to them. - l = [None, True, False, 100] - self.assertEqual(sys.getrefcount(None), none_refcount) - self.assertEqual(sys.getrefcount(True), true_refcount) - self.assertEqual(sys.getrefcount(False), false_refcount) - self.assertEqual(sys.getrefcount(100), smallint_refcount) + + if sys.maxsize < (1 << 32): + IMMORTAL_REFCOUNT = (1 << 30) - 1 + else: + IMMORTAL_REFCOUNT = (1 << 32) - 1 + + IMMORTALS = (None, True, False, Ellipsis, NotImplemented, *range(-5, 257)) + + def assert_immortal(self, immortal): + with self.subTest(immortal): + self.assertEqual(sys.getrefcount(immortal), self.IMMORTAL_REFCOUNT) + + def test_immortals(self): + for immortal in self.IMMORTALS: + self.assert_immortal(immortal) + + def test_list_repeat_respect_immortality(self): + refs = list(self.IMMORTALS) * 42 + for immortal in self.IMMORTALS: + self.assert_immortal(immortal) + + def test_tuple_repeat_respect_immortality(self): + refs = tuple(self.IMMORTALS) * 42 + for immortal in self.IMMORTALS: + self.assert_immortal(immortal) class TestType(unittest.TestCase): diff --git a/Lib/test/test_call.py b/Lib/test/test_call.py index aab7b1580eaf35..12759c53bb662c 100644 --- a/Lib/test/test_call.py +++ b/Lib/test/test_call.py @@ -10,6 +10,7 @@ import gc import contextlib import sys +import types class BadStr(str): @@ -202,6 +203,37 @@ def test_oldargs1_2_kw(self): msg = r"count\(\) takes no keyword arguments" self.assertRaisesRegex(TypeError, msg, [].count, x=2, y=2) + def test_object_not_callable(self): + msg = r"^'object' object is not callable$" + self.assertRaisesRegex(TypeError, msg, object()) + + def test_module_not_callable_no_suggestion_0(self): + msg = r"^'module' object is not callable$" + self.assertRaisesRegex(TypeError, msg, types.ModuleType("mod")) + + def test_module_not_callable_no_suggestion_1(self): + msg = r"^'module' object is not callable$" + mod = types.ModuleType("mod") + mod.mod = 42 + self.assertRaisesRegex(TypeError, msg, mod) + + def test_module_not_callable_no_suggestion_2(self): + msg = r"^'module' object is not callable$" + mod = types.ModuleType("mod") + del mod.__name__ + self.assertRaisesRegex(TypeError, msg, mod) + + def test_module_not_callable_no_suggestion_3(self): + msg = r"^'module' object is not callable$" + mod = types.ModuleType("mod") + mod.__name__ = 42 + self.assertRaisesRegex(TypeError, msg, mod) + + def test_module_not_callable_suggestion(self): + msg = r"^'module' object is not callable\. Did you mean: 'mod\.mod\(\.\.\.\)'\?$" + mod = types.ModuleType("mod") + mod.mod = lambda: ... 
+ self.assertRaisesRegex(TypeError, msg, mod) class TestCallingConventions(unittest.TestCase): diff --git a/Lib/test/test_capi/test_immortal.py b/Lib/test/test_capi/test_immortal.py new file mode 100644 index 00000000000000..ef5d32b7f01935 --- /dev/null +++ b/Lib/test/test_capi/test_immortal.py @@ -0,0 +1,16 @@ +import unittest +from test.support import import_helper + +_testcapi = import_helper.import_module('_testcapi') + + +class TestCAPI(unittest.TestCase): + def test_immortal_builtins(self): + _testcapi.test_immortal_builtins() + + def test_immortal_small_ints(self): + _testcapi.test_immortal_small_ints() + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index 9470cf12a7d1c4..3fc2c07f933061 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -16,11 +16,13 @@ import unittest import warnings import weakref +import operator from test import support from test.support import MISSING_C_DOCSTRINGS from test.support import import_helper from test.support import threading_helper from test.support import warnings_helper +from test.support import requires_limited_api from test.support.script_helper import assert_python_failure, assert_python_ok, run_python_until_end try: import _posixsubprocess @@ -681,6 +683,20 @@ def test_heaptype_with_custom_metaclass(self): with self.assertRaisesRegex(TypeError, msg): t = _testcapi.pytype_fromspec_meta(_testcapi.HeapCTypeMetaclassCustomNew) + def test_heaptype_with_custom_metaclass_deprecation(self): + # gh-103968: a metaclass with custom tp_new is deprecated, but still + # allowed for functions that existed in 3.11 + # (PyType_FromSpecWithBases is used here). + class Base(metaclass=_testcapi.HeapCTypeMetaclassCustomNew): + pass + + with warnings_helper.check_warnings( + ('.*custom tp_new.*in Python 3.14.*', DeprecationWarning), + ): + sub = _testcapi.make_type_with_base(Base) + self.assertTrue(issubclass(sub, Base)) + self.assertIsInstance(sub, _testcapi.HeapCTypeMetaclassCustomNew) + def test_multiple_inheritance_ctypes_with_weakref_or_dict(self): with self.assertRaises(TypeError): @@ -756,7 +772,6 @@ def meth(self): MutableBase.meth = lambda self: 'changed' self.assertEqual(instance.meth(), 'changed') - def test_pynumber_tobase(self): from _testcapi import pynumber_tobase small_number = 123 @@ -1043,6 +1058,175 @@ class dictsub(dict): ... 
# dict subclasses must work self.assertEqual(_testcapi.function_get_kw_defaults(some), None) self.assertEqual(some.__kwdefaults__, None) + def test_unstable_gc_new_with_extra_data(self): + class Data(_testcapi.ObjExtraData): + __slots__ = ('x', 'y') + + d = Data() + d.x = 10 + d.y = 20 + d.extra = 30 + self.assertEqual(d.x, 10) + self.assertEqual(d.y, 20) + self.assertEqual(d.extra, 30) + del d.extra + self.assertIsNone(d.extra) + + +@requires_limited_api +class TestHeapTypeRelative(unittest.TestCase): + """Test API for extending opaque types (PEP 697)""" + + @requires_limited_api + def test_heaptype_relative_sizes(self): + # Test subclassing using "relative" basicsize, see PEP 697 + def check(extra_base_size, extra_size): + Base, Sub, instance, data_ptr, data_offset, data_size = ( + _testcapi.make_sized_heaptypes( + extra_base_size, -extra_size)) + + # no alignment shenanigans when inheriting directly + if extra_size == 0: + self.assertEqual(Base.__basicsize__, Sub.__basicsize__) + self.assertEqual(data_size, 0) + + else: + # The following offsets should be in increasing order: + offsets = [ + (0, 'start of object'), + (Base.__basicsize__, 'end of base data'), + (data_offset, 'subclass data'), + (data_offset + extra_size, 'end of requested subcls data'), + (data_offset + data_size, 'end of reserved subcls data'), + (Sub.__basicsize__, 'end of object'), + ] + ordered_offsets = sorted(offsets, key=operator.itemgetter(0)) + self.assertEqual( + offsets, ordered_offsets, + msg=f'Offsets not in expected order, got: {ordered_offsets}') + + # end of reserved subcls data == end of object + self.assertEqual(Sub.__basicsize__, data_offset + data_size) + + # we don't reserve (requested + alignment) or more data + self.assertLess(data_size - extra_size, + _testcapi.ALIGNOF_MAX_ALIGN_T) + + # The offsets/sizes we calculated should be aligned. 
+ self.assertEqual(data_offset % _testcapi.ALIGNOF_MAX_ALIGN_T, 0) + self.assertEqual(data_size % _testcapi.ALIGNOF_MAX_ALIGN_T, 0) + + sizes = sorted({0, 1, 2, 3, 4, 7, 8, 123, + object.__basicsize__, + object.__basicsize__-1, + object.__basicsize__+1}) + for extra_base_size in sizes: + for extra_size in sizes: + args = dict(extra_base_size=extra_base_size, + extra_size=extra_size) + with self.subTest(**args): + check(**args) + + def test_HeapCCollection(self): + """Make sure HeapCCollection works properly by itself""" + collection = _testcapi.HeapCCollection(1, 2, 3) + self.assertEqual(list(collection), [1, 2, 3]) + + def test_heaptype_inherit_itemsize(self): + """Test HeapCCollection subclasses work properly""" + sizes = sorted({0, 1, 2, 3, 4, 7, 8, 123, + object.__basicsize__, + object.__basicsize__-1, + object.__basicsize__+1}) + for extra_size in sizes: + with self.subTest(extra_size=extra_size): + Sub = _testcapi.subclass_var_heaptype( + _testcapi.HeapCCollection, -extra_size, 0, 0) + collection = Sub(1, 2, 3) + collection.set_data_to_3s() + + self.assertEqual(list(collection), [1, 2, 3]) + mem = collection.get_data() + self.assertGreaterEqual(len(mem), extra_size) + self.assertTrue(set(mem) <= {3}, f'got {mem!r}') + + def test_heaptype_invalid_inheritance(self): + with self.assertRaises(SystemError, + msg="Cannot extend variable-size class without " + + "Py_TPFLAGS_ITEMS_AT_END"): + _testcapi.subclass_heaptype(int, -8, 0) + + def test_heaptype_relative_members(self): + """Test HeapCCollection subclasses work properly""" + sizes = sorted({0, 1, 2, 3, 4, 7, 8, 123, + object.__basicsize__, + object.__basicsize__-1, + object.__basicsize__+1}) + for extra_base_size in sizes: + for extra_size in sizes: + for offset in sizes: + with self.subTest(extra_base_size=extra_base_size, extra_size=extra_size, offset=offset): + if offset < extra_size: + Sub = _testcapi.make_heaptype_with_member( + extra_base_size, -extra_size, offset, True) + Base = Sub.mro()[1] + instance = Sub() + self.assertEqual(instance.memb, instance.get_memb()) + instance.set_memb(13) + self.assertEqual(instance.memb, instance.get_memb()) + self.assertEqual(instance.get_memb(), 13) + instance.memb = 14 + self.assertEqual(instance.memb, instance.get_memb()) + self.assertEqual(instance.get_memb(), 14) + self.assertGreaterEqual(instance.get_memb_offset(), Base.__basicsize__) + self.assertLess(instance.get_memb_offset(), Sub.__basicsize__) + with self.assertRaises(SystemError): + instance.get_memb_relative() + with self.assertRaises(SystemError): + instance.set_memb_relative(0) + else: + with self.assertRaises(SystemError): + Sub = _testcapi.make_heaptype_with_member( + extra_base_size, -extra_size, offset, True) + with self.assertRaises(SystemError): + Sub = _testcapi.make_heaptype_with_member( + extra_base_size, extra_size, offset, True) + with self.subTest(extra_base_size=extra_base_size, extra_size=extra_size): + with self.assertRaises(SystemError): + Sub = _testcapi.make_heaptype_with_member( + extra_base_size, -extra_size, -1, True) + + def test_heaptype_relative_members_errors(self): + with self.assertRaisesRegex( + SystemError, + r"With Py_RELATIVE_OFFSET, basicsize must be negative"): + _testcapi.make_heaptype_with_member(0, 1234, 0, True) + with self.assertRaisesRegex( + SystemError, r"Member offset out of range \(0\.\.-basicsize\)"): + _testcapi.make_heaptype_with_member(0, -8, 1234, True) + with self.assertRaisesRegex( + SystemError, r"Member offset out of range \(0\.\.-basicsize\)"): + 
_testcapi.make_heaptype_with_member(0, -8, -1, True) + + Sub = _testcapi.make_heaptype_with_member(0, -8, 0, True) + instance = Sub() + with self.assertRaisesRegex( + SystemError, r"PyMember_GetOne used with Py_RELATIVE_OFFSET"): + instance.get_memb_relative() + with self.assertRaisesRegex( + SystemError, r"PyMember_SetOne used with Py_RELATIVE_OFFSET"): + instance.set_memb_relative(0) + + def test_pyobject_getitemdata_error(self): + """Test PyObject_GetItemData fails on unsupported types""" + with self.assertRaises(TypeError): + # None is not variable-length + _testcapi.pyobject_getitemdata(None) + with self.assertRaises(TypeError): + # int is variable-length, but doesn't have the + # Py_TPFLAGS_ITEMS_AT_END layout (and flag) + _testcapi.pyobject_getitemdata(0) + class TestPendingCalls(unittest.TestCase): @@ -1217,23 +1401,37 @@ def test_configured_settings(self): DAEMON_THREADS = 1<<11 FORK = 1<<15 EXEC = 1<<16 - - features = ['obmalloc', 'fork', 'exec', 'threads', 'daemon_threads', - 'extensions'] + ALL_FLAGS = (OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS + | EXTENSIONS); + + features = [ + 'obmalloc', + 'fork', + 'exec', + 'threads', + 'daemon_threads', + 'extensions', + 'own_gil', + ] kwlist = [f'allow_{n}' for n in features] kwlist[0] = 'use_main_obmalloc' - kwlist[-1] = 'check_multi_interp_extensions' + kwlist[-2] = 'check_multi_interp_extensions' + kwlist[-1] = 'own_gil' # expected to work for config, expected in { - (True, True, True, True, True, True): - OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS | EXTENSIONS, - (True, False, False, False, False, False): OBMALLOC, - (False, False, False, True, False, True): THREADS | EXTENSIONS, + (True, True, True, True, True, True, True): + (ALL_FLAGS, True), + (True, False, False, False, False, False, False): + (OBMALLOC, False), + (False, False, False, True, False, True, False): + (THREADS | EXTENSIONS, False), }.items(): kwargs = dict(zip(kwlist, config)) + exp_flags, exp_gil = expected expected = { - 'feature_flags': expected, + 'feature_flags': exp_flags, + 'own_gil': exp_gil, } with self.subTest(config): r, w = os.pipe() @@ -1253,7 +1451,7 @@ def test_configured_settings(self): # expected to fail for config in [ - (False, False, False, False, False, False), + (False, False, False, False, False, False, False), ]: kwargs = dict(zip(kwlist, config)) with self.subTest(config): @@ -1289,6 +1487,7 @@ def test_overridden_setting_extensions_subinterp_check(self): 'allow_exec': True, 'allow_threads': True, 'allow_daemon_threads': True, + 'own_gil': False, } def check(enabled, override): @@ -1299,6 +1498,7 @@ def check(enabled, override): flags = BASE_FLAGS | EXTENSIONS if enabled else BASE_FLAGS settings = { 'feature_flags': flags, + 'own_gil': False, } expected = { diff --git a/Lib/test/test_capi/test_unicode.py b/Lib/test/test_capi/test_unicode.py index 857579f758386f..00807d968a7c43 100644 --- a/Lib/test/test_capi/test_unicode.py +++ b/Lib/test/test_capi/test_unicode.py @@ -17,6 +17,287 @@ class Str(str): class CAPITest(unittest.TestCase): + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_new(self): + """Test PyUnicode_New()""" + from _testcapi import unicode_new as new + + for maxchar in 0, 0x61, 0xa1, 0x4f60, 0x1f600, 0x10ffff: + self.assertEqual(new(0, maxchar), '') + self.assertEqual(new(5, maxchar), chr(maxchar)*5) + self.assertEqual(new(0, 0x110000), '') + self.assertRaises(SystemError, new, 5, 0x110000) + self.assertRaises(SystemError, new, -1, 0) + + @support.cpython_only + 
@unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_fill(self): + """Test PyUnicode_Fill()""" + from _testcapi import unicode_fill as fill + + strings = [ + # all strings have exactly 5 characters + 'abcde', '\xa1\xa2\xa3\xa4\xa5', + '\u4f60\u597d\u4e16\u754c\uff01', + '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604' + ] + chars = [0x78, 0xa9, 0x20ac, 0x1f638] + + for idx, fill_char in enumerate(chars): + # wide -> narrow: exceed maxchar limitation + for to in strings[:idx]: + self.assertRaises(ValueError, fill, to, 0, 0, fill_char) + for to in strings[idx:]: + for start in range(7): + for length in range(-1, 7 - start): + filled = max(min(length, 5 - start), 0) + if filled == 5 and to != strings[idx]: + # narrow -> wide + # Tests omitted since this creates invalid strings. + continue + expected = to[:start] + chr(fill_char) * filled + to[start + filled:] + self.assertEqual(fill(to, start, length, fill_char), + (expected, filled)) + + s = strings[0] + self.assertRaises(IndexError, fill, s, -1, 0, 0x78) + self.assertRaises(ValueError, fill, s, 0, 0, 0x110000) + self.assertRaises(SystemError, fill, b'abc', 0, 0, 0x78) + self.assertRaises(SystemError, fill, [], 0, 0, 0x78) + # CRASHES fill(s, 0, NULL, 0, 0) + # CRASHES fill(NULL, 0, 0, 0x78) + # TODO: Test PyUnicode_Fill() with non-modifiable unicode. + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_writechar(self): + """Test PyUnicode_ReadChar()""" + from _testcapi import unicode_writechar as writechar + + strings = [ + # one string for every kind + 'abc', '\xa1\xa2\xa3', '\u4f60\u597d\u4e16', + '\U0001f600\U0001f601\U0001f602' + ] + # one character for every kind + out of range code + chars = [0x78, 0xa9, 0x20ac, 0x1f638, 0x110000] + for i, s in enumerate(strings): + for j, c in enumerate(chars): + if j <= i: + self.assertEqual(writechar(s, 1, c), + (s[:1] + chr(c) + s[2:], 0)) + else: + self.assertRaises(ValueError, writechar, s, 1, c) + + self.assertRaises(IndexError, writechar, 'abc', 3, 0x78) + self.assertRaises(IndexError, writechar, 'abc', -1, 0x78) + self.assertRaises(TypeError, writechar, b'abc', 0, 0x78) + self.assertRaises(TypeError, writechar, [], 0, 0x78) + # CRASHES writechar(NULL, 0, 0x78) + # TODO: Test PyUnicode_CopyCharacters() with non-modifiable and legacy + # unicode. + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_resize(self): + """Test PyUnicode_Resize()""" + from _testcapi import unicode_resize as resize + + strings = [ + # all strings have exactly 3 characters + 'abc', '\xa1\xa2\xa3', '\u4f60\u597d\u4e16', + '\U0001f600\U0001f601\U0001f602' + ] + for s in strings: + self.assertEqual(resize(s, 3), (s, 0)) + self.assertEqual(resize(s, 2), (s[:2], 0)) + self.assertEqual(resize(s, 4), (s + '\0', 0)) + self.assertEqual(resize(s, 0), ('', 0)) + self.assertRaises(SystemError, resize, b'abc', 0) + self.assertRaises(SystemError, resize, [], 0) + self.assertRaises(SystemError, resize, NULL, 0) + # TODO: Test PyUnicode_Resize() with non-modifiable and legacy unicode + # and with NULL as the address. 
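# Editor's sketch, not part of the patch: a pure-Python model of the
# truncate/pad behaviour the PyUnicode_Resize() tests above assert, so the
# expected results are visible without tracing the C wrapper. The helper name
# below is illustrative only.
def _resize_model(s, length):
    # Shrinking keeps a prefix; growing pads with NUL code points, matching
    # resize(s, 2) == (s[:2], 0) and resize(s, 4) == (s + '\0', 0) above.
    return s[:length] if length <= len(s) else s + '\0' * (length - len(s))

assert _resize_model('abc', 2) == 'ab'
assert _resize_model('abc', 4) == 'abc\0'
assert _resize_model('abc', 0) == ''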
+ + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_append(self): + """Test PyUnicode_Append()""" + from _testcapi import unicode_append as append + + strings = [ + 'abc', '\xa1\xa2\xa3', '\u4f60\u597d\u4e16', + '\U0001f600\U0001f601\U0001f602' + ] + for left in strings: + left = left[::-1] + for right in strings: + expected = left + right + self.assertEqual(append(left, right), expected) + + self.assertRaises(SystemError, append, 'abc', b'abc') + self.assertRaises(SystemError, append, b'abc', 'abc') + self.assertRaises(SystemError, append, b'abc', b'abc') + self.assertRaises(SystemError, append, 'abc', []) + self.assertRaises(SystemError, append, [], 'abc') + self.assertRaises(SystemError, append, [], []) + self.assertRaises(SystemError, append, NULL, 'abc') + self.assertRaises(SystemError, append, 'abc', NULL) + # TODO: Test PyUnicode_Append() with modifiable unicode + # and with NULL as the address. + # TODO: Check reference counts. + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_appendanddel(self): + """Test PyUnicode_AppendAndDel()""" + from _testcapi import unicode_appendanddel as appendanddel + + strings = [ + 'abc', '\xa1\xa2\xa3', '\u4f60\u597d\u4e16', + '\U0001f600\U0001f601\U0001f602' + ] + for left in strings: + left = left[::-1] + for right in strings: + self.assertEqual(appendanddel(left, right), left + right) + + self.assertRaises(SystemError, appendanddel, 'abc', b'abc') + self.assertRaises(SystemError, appendanddel, b'abc', 'abc') + self.assertRaises(SystemError, appendanddel, b'abc', b'abc') + self.assertRaises(SystemError, appendanddel, 'abc', []) + self.assertRaises(SystemError, appendanddel, [], 'abc') + self.assertRaises(SystemError, appendanddel, [], []) + self.assertRaises(SystemError, appendanddel, NULL, 'abc') + self.assertRaises(SystemError, appendanddel, 'abc', NULL) + # TODO: Test PyUnicode_AppendAndDel() with modifiable unicode + # and with NULL as the address. + # TODO: Check reference counts. 
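# Editor's note, not part of the patch: PyUnicode_Append() concatenates in
# place through a PyObject** slot, and PyUnicode_AppendAndDel() additionally
# releases its right operand; from Python, the observable result of the two
# _testcapi helpers the new tests import is plain concatenation.
from _testcapi import unicode_append, unicode_appendanddel  # CPython test helpers

assert unicode_append('cba', 'abc') == 'cbaabc'
assert unicode_appendanddel('cba', 'abc') == 'cbaabc'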
+ + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_fromstringandsize(self): + """Test PyUnicode_FromStringAndSize()""" + from _testcapi import unicode_fromstringandsize as fromstringandsize + + self.assertEqual(fromstringandsize(b'abc'), 'abc') + self.assertEqual(fromstringandsize(b'abc', 2), 'ab') + self.assertEqual(fromstringandsize(b'abc\0def'), 'abc\0def') + self.assertEqual(fromstringandsize(b'\xc2\xa1\xc2\xa2'), '\xa1\xa2') + self.assertEqual(fromstringandsize(b'\xe4\xbd\xa0'), '\u4f60') + self.assertEqual(fromstringandsize(b'\xf0\x9f\x98\x80'), '\U0001f600') + self.assertRaises(UnicodeDecodeError, fromstringandsize, b'\xc2\xa1', 1) + self.assertRaises(UnicodeDecodeError, fromstringandsize, b'\xa1', 1) + self.assertEqual(fromstringandsize(b'', 0), '') + self.assertEqual(fromstringandsize(NULL, 0), '') + + self.assertRaises(SystemError, fromstringandsize, b'abc', -1) + # TODO: Test PyUnicode_FromStringAndSize(NULL, size) for size != 0 + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_fromstring(self): + """Test PyUnicode_FromString()""" + from _testcapi import unicode_fromstring as fromstring + + self.assertEqual(fromstring(b'abc'), 'abc') + self.assertEqual(fromstring(b'\xc2\xa1\xc2\xa2'), '\xa1\xa2') + self.assertEqual(fromstring(b'\xe4\xbd\xa0'), '\u4f60') + self.assertEqual(fromstring(b'\xf0\x9f\x98\x80'), '\U0001f600') + self.assertRaises(UnicodeDecodeError, fromstring, b'\xc2') + self.assertRaises(UnicodeDecodeError, fromstring, b'\xa1') + self.assertEqual(fromstring(b''), '') + + # CRASHES fromstring(NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_fromkindanddata(self): + """Test PyUnicode_FromKindAndData()""" + from _testcapi import unicode_fromkindanddata as fromkindanddata + + strings = [ + 'abcde', '\xa1\xa2\xa3\xa4\xa5', + '\u4f60\u597d\u4e16\u754c\uff01', + '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604' + ] + enc1 = 'latin1' + for s in strings[:2]: + self.assertEqual(fromkindanddata(1, s.encode(enc1)), s) + enc2 = 'utf-16le' if sys.byteorder == 'little' else 'utf-16be' + for s in strings[:3]: + self.assertEqual(fromkindanddata(2, s.encode(enc2)), s) + enc4 = 'utf-32le' if sys.byteorder == 'little' else 'utf-32be' + for s in strings: + self.assertEqual(fromkindanddata(4, s.encode(enc4)), s) + self.assertEqual(fromkindanddata(2, '\U0001f600'.encode(enc2)), + '\ud83d\ude00') + for kind in 1, 2, 4: + self.assertEqual(fromkindanddata(kind, b''), '') + self.assertEqual(fromkindanddata(kind, b'\0'*kind), '\0') + self.assertEqual(fromkindanddata(kind, NULL, 0), '') + + for kind in -1, 0, 3, 5, 8: + self.assertRaises(SystemError, fromkindanddata, kind, b'') + self.assertRaises(ValueError, fromkindanddata, 1, b'abc', -1) + self.assertRaises(ValueError, fromkindanddata, 1, NULL, -1) + # CRASHES fromkindanddata(1, NULL, 1) + # CRASHES fromkindanddata(4, b'\xff\xff\xff\xff') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_substring(self): + """Test PyUnicode_Substring()""" + from _testcapi import unicode_substring as substring + + strings = [ + 'ab', 'ab\xa1\xa2', + 'ab\xa1\xa2\u4f60\u597d', + 'ab\xa1\xa2\u4f60\u597d\U0001f600\U0001f601' + ] + for s in strings: + for start in range(0, len(s) + 2): + for end in range(max(start-1, 0), len(s) + 2): + self.assertEqual(substring(s, start, end), s[start:end]) + + self.assertRaises(IndexError, substring, 'abc', -1, 0) + 
self.assertRaises(IndexError, substring, 'abc', 0, -1) + # CRASHES substring(b'abc', 0, 0) + # CRASHES substring([], 0, 0) + # CRASHES substring(NULL, 0, 0) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_getlength(self): + """Test PyUnicode_GetLength()""" + from _testcapi import unicode_getlength as getlength + + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', + 'a\ud800b\udfffc', '\ud834\udd1e']: + self.assertEqual(getlength(s), len(s)) + + self.assertRaises(TypeError, getlength, b'abc') + self.assertRaises(TypeError, getlength, []) + # CRASHES getlength(NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_readchar(self): + """Test PyUnicode_ReadChar()""" + from _testcapi import unicode_readchar as readchar + + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', + 'a\ud800b\udfffc', '\ud834\udd1e']: + for i, c in enumerate(s): + self.assertEqual(readchar(s, i), ord(c)) + self.assertRaises(IndexError, readchar, s, len(s)) + self.assertRaises(IndexError, readchar, s, -1) + + self.assertRaises(TypeError, readchar, b'abc', 0) + self.assertRaises(TypeError, readchar, [], 0) + # CRASHES readchar(NULL, 0) + @support.cpython_only @unittest.skipIf(_testcapi is None, 'need _testcapi module') def test_fromobject(self): @@ -293,13 +574,70 @@ def check_format(expected, format, *args): self.assertRaisesRegex(SystemError, 'invalid format string', PyUnicode_FromFormat, b'%+i', c_int(10)) + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_interninplace(self): + """Test PyUnicode_InternInPlace()""" + from _testcapi import unicode_interninplace as interninplace + + s = b'abc'.decode() + r = interninplace(s) + self.assertEqual(r, 'abc') + + # CRASHES interninplace(b'abc') + # CRASHES interninplace(NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_internfromstring(self): + """Test PyUnicode_InternFromString()""" + from _testcapi import unicode_internfromstring as internfromstring + + self.assertEqual(internfromstring(b'abc'), 'abc') + self.assertEqual(internfromstring(b'\xf0\x9f\x98\x80'), '\U0001f600') + self.assertRaises(UnicodeDecodeError, internfromstring, b'\xc2') + self.assertRaises(UnicodeDecodeError, internfromstring, b'\xa1') + self.assertEqual(internfromstring(b''), '') + + # CRASHES internfromstring(NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_fromwidechar(self): + """Test PyUnicode_FromWideChar()""" + from _testcapi import unicode_fromwidechar as fromwidechar + from _testcapi import SIZEOF_WCHAR_T + + if SIZEOF_WCHAR_T == 2: + encoding = 'utf-16le' if sys.byteorder == 'little' else 'utf-16be' + elif SIZEOF_WCHAR_T == 4: + encoding = 'utf-32le' if sys.byteorder == 'little' else 'utf-32be' + + for s in '', 'abc', '\xa1\xa2', '\u4f60', '\U0001f600': + b = s.encode(encoding) + self.assertEqual(fromwidechar(b), s) + self.assertEqual(fromwidechar(b + b'\0'*SIZEOF_WCHAR_T, -1), s) + for s in '\ud83d', '\ude00': + b = s.encode(encoding, 'surrogatepass') + self.assertEqual(fromwidechar(b), s) + self.assertEqual(fromwidechar(b + b'\0'*SIZEOF_WCHAR_T, -1), s) + + self.assertEqual(fromwidechar('abc'.encode(encoding), 2), 'ab') + if SIZEOF_WCHAR_T == 2: + self.assertEqual(fromwidechar('a\U0001f600'.encode(encoding), 2), 'a\ud83d') + + self.assertRaises(SystemError, fromwidechar, b'\0'*SIZEOF_WCHAR_T, -2) + 
self.assertEqual(fromwidechar(NULL, 0), '') + self.assertRaises(SystemError, fromwidechar, NULL, 1) + self.assertRaises(SystemError, fromwidechar, NULL, -1) + @support.cpython_only @unittest.skipIf(_testcapi is None, 'need _testcapi module') def test_aswidechar(self): """Test PyUnicode_AsWideChar()""" from _testcapi import unicode_aswidechar - import_helper.import_module('ctypes') - from ctypes import c_wchar, sizeof + from _testcapi import unicode_aswidechar_null + from _testcapi import SIZEOF_WCHAR_T wchar, size = unicode_aswidechar('abcdef', 2) self.assertEqual(size, 2) @@ -308,6 +646,8 @@ def test_aswidechar(self): wchar, size = unicode_aswidechar('abc', 3) self.assertEqual(size, 3) self.assertEqual(wchar, 'abc') + self.assertEqual(unicode_aswidechar_null('abc', 10), 4) + self.assertEqual(unicode_aswidechar_null('abc', 0), 4) wchar, size = unicode_aswidechar('abc', 4) self.assertEqual(size, 3) @@ -320,60 +660,113 @@ def test_aswidechar(self): wchar, size = unicode_aswidechar('abc\0def', 20) self.assertEqual(size, 7) self.assertEqual(wchar, 'abc\0def\0') + self.assertEqual(unicode_aswidechar_null('abc\0def', 20), 8) nonbmp = chr(0x10ffff) - if sizeof(c_wchar) == 2: - buflen = 3 + if SIZEOF_WCHAR_T == 2: nchar = 2 - else: # sizeof(c_wchar) == 4 - buflen = 2 + else: # SIZEOF_WCHAR_T == 4 nchar = 1 - wchar, size = unicode_aswidechar(nonbmp, buflen) + wchar, size = unicode_aswidechar(nonbmp, 10) self.assertEqual(size, nchar) self.assertEqual(wchar, nonbmp + '\0') + self.assertEqual(unicode_aswidechar_null(nonbmp, 10), nchar + 1) + + self.assertRaises(TypeError, unicode_aswidechar, b'abc', 10) + self.assertRaises(TypeError, unicode_aswidechar, [], 10) + self.assertRaises(SystemError, unicode_aswidechar, NULL, 10) + self.assertRaises(TypeError, unicode_aswidechar_null, b'abc', 10) + self.assertRaises(TypeError, unicode_aswidechar_null, [], 10) + self.assertRaises(SystemError, unicode_aswidechar_null, NULL, 10) @support.cpython_only @unittest.skipIf(_testcapi is None, 'need _testcapi module') def test_aswidecharstring(self): """Test PyUnicode_AsWideCharString()""" from _testcapi import unicode_aswidecharstring - import_helper.import_module('ctypes') - from ctypes import c_wchar, sizeof + from _testcapi import unicode_aswidecharstring_null + from _testcapi import SIZEOF_WCHAR_T wchar, size = unicode_aswidecharstring('abc') self.assertEqual(size, 3) self.assertEqual(wchar, 'abc\0') + self.assertEqual(unicode_aswidecharstring_null('abc'), 'abc') wchar, size = unicode_aswidecharstring('abc\0def') self.assertEqual(size, 7) self.assertEqual(wchar, 'abc\0def\0') + self.assertRaises(ValueError, unicode_aswidecharstring_null, 'abc\0def') nonbmp = chr(0x10ffff) - if sizeof(c_wchar) == 2: + if SIZEOF_WCHAR_T == 2: nchar = 2 - else: # sizeof(c_wchar) == 4 + else: # SIZEOF_WCHAR_T == 4 nchar = 1 wchar, size = unicode_aswidecharstring(nonbmp) self.assertEqual(size, nchar) self.assertEqual(wchar, nonbmp + '\0') + self.assertEqual(unicode_aswidecharstring_null(nonbmp), nonbmp) + + self.assertRaises(TypeError, unicode_aswidecharstring, b'abc') + self.assertRaises(TypeError, unicode_aswidecharstring, []) + self.assertRaises(SystemError, unicode_aswidecharstring, NULL) + self.assertRaises(TypeError, unicode_aswidecharstring_null, b'abc') + self.assertRaises(TypeError, unicode_aswidecharstring_null, []) + self.assertRaises(SystemError, unicode_aswidecharstring_null, NULL) @support.cpython_only @unittest.skipIf(_testcapi is None, 'need _testcapi module') def test_asucs4(self): """Test PyUnicode_AsUCS4()""" from 
_testcapi import unicode_asucs4 + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', 'a\ud800b\udfffc', '\ud834\udd1e']: l = len(s) - self.assertEqual(unicode_asucs4(s, l, True), s+'\0') - self.assertEqual(unicode_asucs4(s, l, False), s+'\uffff') - self.assertEqual(unicode_asucs4(s, l+1, True), s+'\0\uffff') - self.assertEqual(unicode_asucs4(s, l+1, False), s+'\0\uffff') - self.assertRaises(SystemError, unicode_asucs4, s, l-1, True) - self.assertRaises(SystemError, unicode_asucs4, s, l-2, False) + self.assertEqual(unicode_asucs4(s, l, 1), s+'\0') + self.assertEqual(unicode_asucs4(s, l, 0), s+'\uffff') + self.assertEqual(unicode_asucs4(s, l+1, 1), s+'\0\uffff') + self.assertEqual(unicode_asucs4(s, l+1, 0), s+'\0\uffff') + self.assertRaises(SystemError, unicode_asucs4, s, l-1, 1) + self.assertRaises(SystemError, unicode_asucs4, s, l-2, 0) + s = '\0'.join([s, s]) + self.assertEqual(unicode_asucs4(s, len(s), 1), s+'\0') + self.assertEqual(unicode_asucs4(s, len(s), 0), s+'\uffff') + + # CRASHES unicode_asucs4(b'abc', 1, 0) + # CRASHES unicode_asucs4(b'abc', 1, 1) + # CRASHES unicode_asucs4([], 1, 1) + # CRASHES unicode_asucs4(NULL, 1, 0) + # CRASHES unicode_asucs4(NULL, 1, 1) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_asucs4copy(self): + """Test PyUnicode_AsUCS4Copy()""" + from _testcapi import unicode_asucs4copy as asucs4copy + + for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', + 'a\ud800b\udfffc', '\ud834\udd1e']: + self.assertEqual(asucs4copy(s), s+'\0') s = '\0'.join([s, s]) - self.assertEqual(unicode_asucs4(s, len(s), True), s+'\0') - self.assertEqual(unicode_asucs4(s, len(s), False), s+'\uffff') + self.assertEqual(asucs4copy(s), s+'\0') + + # CRASHES asucs4copy(b'abc') + # CRASHES asucs4copy([]) + # CRASHES asucs4copy(NULL) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_fromordinal(self): + """Test PyUnicode_FromOrdinal()""" + from _testcapi import unicode_fromordinal as fromordinal + + self.assertEqual(fromordinal(0x61), 'a') + self.assertEqual(fromordinal(0x20ac), '\u20ac') + self.assertEqual(fromordinal(0x1f600), '\U0001f600') + + self.assertRaises(ValueError, fromordinal, 0x110000) + self.assertRaises(ValueError, fromordinal, -1) @support.cpython_only @unittest.skipIf(_testcapi is None, 'need _testcapi module') @@ -381,29 +774,62 @@ def test_asutf8(self): """Test PyUnicode_AsUTF8()""" from _testcapi import unicode_asutf8 - bmp = '\u0100' - bmp2 = '\uffff' - nonbmp = chr(0x10ffff) + self.assertEqual(unicode_asutf8('abc', 4), b'abc\0') + self.assertEqual(unicode_asutf8('абв', 7), b'\xd0\xb0\xd0\xb1\xd0\xb2\0') + self.assertEqual(unicode_asutf8('\U0001f600', 5), b'\xf0\x9f\x98\x80\0') + self.assertEqual(unicode_asutf8('abc\0def', 8), b'abc\0def\0') - self.assertEqual(unicode_asutf8(bmp), b'\xc4\x80') - self.assertEqual(unicode_asutf8(bmp2), b'\xef\xbf\xbf') - self.assertEqual(unicode_asutf8(nonbmp), b'\xf4\x8f\xbf\xbf') - self.assertRaises(UnicodeEncodeError, unicode_asutf8, 'a\ud800b\udfffc') + self.assertRaises(UnicodeEncodeError, unicode_asutf8, '\ud8ff', 0) + self.assertRaises(TypeError, unicode_asutf8, b'abc', 0) + self.assertRaises(TypeError, unicode_asutf8, [], 0) + # CRASHES unicode_asutf8(NULL, 0) @support.cpython_only @unittest.skipIf(_testcapi is None, 'need _testcapi module') def test_asutf8andsize(self): """Test PyUnicode_AsUTF8AndSize()""" from _testcapi import unicode_asutf8andsize + from _testcapi import unicode_asutf8andsize_null - bmp = 
'\u0100' - bmp2 = '\uffff' - nonbmp = chr(0x10ffff) + self.assertEqual(unicode_asutf8andsize('abc', 4), (b'abc\0', 3)) + self.assertEqual(unicode_asutf8andsize('абв', 7), (b'\xd0\xb0\xd0\xb1\xd0\xb2\0', 6)) + self.assertEqual(unicode_asutf8andsize('\U0001f600', 5), (b'\xf0\x9f\x98\x80\0', 4)) + self.assertEqual(unicode_asutf8andsize('abc\0def', 8), (b'abc\0def\0', 7)) + self.assertEqual(unicode_asutf8andsize_null('abc', 4), b'abc\0') + self.assertEqual(unicode_asutf8andsize_null('abc\0def', 8), b'abc\0def\0') - self.assertEqual(unicode_asutf8andsize(bmp), (b'\xc4\x80', 2)) - self.assertEqual(unicode_asutf8andsize(bmp2), (b'\xef\xbf\xbf', 3)) - self.assertEqual(unicode_asutf8andsize(nonbmp), (b'\xf4\x8f\xbf\xbf', 4)) - self.assertRaises(UnicodeEncodeError, unicode_asutf8andsize, 'a\ud800b\udfffc') + self.assertRaises(UnicodeEncodeError, unicode_asutf8andsize, '\ud8ff', 0) + self.assertRaises(TypeError, unicode_asutf8andsize, b'abc', 0) + self.assertRaises(TypeError, unicode_asutf8andsize, [], 0) + # CRASHES unicode_asutf8andsize(NULL, 0) + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_getdefaultencoding(self): + """Test PyUnicode_GetDefaultEncoding()""" + from _testcapi import unicode_getdefaultencoding as getdefaultencoding + + self.assertEqual(getdefaultencoding(), b'utf-8') + + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_transform_decimal_and_space(self): + """Test _PyUnicode_TransformDecimalAndSpaceToASCII()""" + from _testcapi import unicode_transformdecimalandspacetoascii as transform_decimal + + self.assertEqual(transform_decimal('123'), + '123') + self.assertEqual(transform_decimal('\u0663.\u0661\u0664'), + '3.14') + self.assertEqual(transform_decimal("\N{EM SPACE}3.14\N{EN SPACE}"), + " 3.14 ") + self.assertEqual(transform_decimal('12\u20ac3'), + '12?') + self.assertEqual(transform_decimal(''), '') + + self.assertRaises(SystemError, transform_decimal, b'123') + self.assertRaises(SystemError, transform_decimal, []) + # CRASHES transform_decimal(NULL) @support.cpython_only @unittest.skipIf(_testcapi is None, 'need _testcapi module') @@ -858,6 +1284,7 @@ def test_copycharacters(self): from _testcapi import unicode_copycharacters strings = [ + # all strings have exactly 5 characters 'abcde', '\xa1\xa2\xa3\xa4\xa5', '\u4f60\u597d\u4e16\u754c\uff01', '\U0001f600\U0001f601\U0001f602\U0001f603\U0001f604' @@ -894,6 +1321,10 @@ def test_copycharacters(self): self.assertRaises(SystemError, unicode_copycharacters, s, 1, s, 0, 5) self.assertRaises(SystemError, unicode_copycharacters, s, 0, s, 0, -1) self.assertRaises(SystemError, unicode_copycharacters, s, 0, b'', 0, 0) + self.assertRaises(SystemError, unicode_copycharacters, s, 0, [], 0, 0) + # CRASHES unicode_copycharacters(s, 0, NULL, 0, 0) + # TODO: Test PyUnicode_CopyCharacters() with non-unicode and + # non-modifiable unicode as "to". @support.cpython_only @unittest.skipIf(_testcapi is None, 'need _testcapi module') diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py index 4abf739cf52ca3..6aaf4d1ed8d560 100644 --- a/Lib/test/test_clinic.py +++ b/Lib/test/test_clinic.py @@ -99,8 +99,9 @@ def test_eol(self): # the last line of the block got corrupted. 
c = clinic.Clinic(clinic.CLanguage(None), filename="file") raw = "/*[clinic]\nfoo\n[clinic]*/" - cooked = c.parse(raw).splitlines() - end_line = cooked[2].rstrip() + cooked, _ = c.parse(raw) + lines = cooked.splitlines() + end_line = lines[2].rstrip() # this test is redundant, it's just here explicitly to catch # the regression test so we don't forget what it looked like self.assertNotEqual(end_line, "[clinic]*/[clinic]*/") @@ -259,7 +260,7 @@ def _test_clinic(self, input, output): c = clinic.Clinic(language, filename="file") c.parsers['inert'] = InertParser(c) c.parsers['copy'] = CopyParser(c) - computed = c.parse(input) + computed, _ = c.parse(input) self.assertEqual(output, computed) def test_clinic_1(self): @@ -1284,6 +1285,19 @@ def test_gh_99240_double_free(self): with self.assertRaisesRegex(TypeError, expected_error): ac_tester.gh_99240_double_free('a', '\0b') + def test_cloned_func_exception_message(self): + incorrect_arg = -1 # f1() and f2() accept a single str + with self.assertRaisesRegex(TypeError, "clone_f1"): + ac_tester.clone_f1(incorrect_arg) + with self.assertRaisesRegex(TypeError, "clone_f2"): + ac_tester.clone_f2(incorrect_arg) + + def test_cloned_func_with_converter_exception_message(self): + for name in "clone_with_conv_f1", "clone_with_conv_f2": + with self.subTest(name=name): + func = getattr(ac_tester, name) + self.assertEqual(func(), name) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index d98e23855e0c19..8bf299382e9ca4 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -669,6 +669,19 @@ def test_syntaxerror_null_bytes(self): ], ) + def test_syntaxerror_null_bytes_in_multiline_string(self): + scripts = ["\n'''\nmultilinestring\0\n'''", "\nf'''\nmultilinestring\0\n'''"] # Both normal and f-strings + with os_helper.temp_dir() as script_dir: + for script in scripts: + script_name = _make_test_script(script_dir, 'script', script) + _, _, stderr = assert_python_failure(script_name) + self.assertEqual( + stderr.splitlines()[-2:], + [ b" multilinestring", + b'SyntaxError: source code cannot contain null bytes' + ] + ) + def test_consistent_sys_path_for_direct_execution(self): # This test case ensures that the following all give the same # sys.path configuration: diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py index fb568a48396498..bb8b352518ef3e 100644 --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -25,7 +25,7 @@ from collections.abc import Set, MutableSet from collections.abc import Mapping, MutableMapping, KeysView, ItemsView, ValuesView from collections.abc import Sequence, MutableSequence -from collections.abc import ByteString +from collections.abc import ByteString, Buffer class TestUserObjects(unittest.TestCase): @@ -1940,14 +1940,34 @@ def assert_index_same(seq1, seq2, index_args): def test_ByteString(self): for sample in [bytes, bytearray]: - self.assertIsInstance(sample(), ByteString) + with self.assertWarns(DeprecationWarning): + self.assertIsInstance(sample(), ByteString) self.assertTrue(issubclass(sample, ByteString)) for sample in [str, list, tuple]: - self.assertNotIsInstance(sample(), ByteString) + with self.assertWarns(DeprecationWarning): + self.assertNotIsInstance(sample(), ByteString) self.assertFalse(issubclass(sample, ByteString)) - self.assertNotIsInstance(memoryview(b""), ByteString) + with self.assertWarns(DeprecationWarning): + self.assertNotIsInstance(memoryview(b""), ByteString) 
self.assertFalse(issubclass(memoryview, ByteString)) - self.validate_abstract_methods(ByteString, '__getitem__', '__len__') + with self.assertWarns(DeprecationWarning): + self.validate_abstract_methods(ByteString, '__getitem__', '__len__') + + with self.assertWarns(DeprecationWarning): + class X(ByteString): pass + + with self.assertWarns(DeprecationWarning): + # No metaclass conflict + class Z(ByteString, Awaitable): pass + + def test_Buffer(self): + for sample in [bytes, bytearray, memoryview]: + self.assertIsInstance(sample(b"x"), Buffer) + self.assertTrue(issubclass(sample, Buffer)) + for sample in [str, list, tuple]: + self.assertNotIsInstance(sample(), Buffer) + self.assertFalse(issubclass(sample, Buffer)) + self.validate_abstract_methods(Buffer, '__buffer__') def test_MutableSequence(self): for sample in [tuple, str, bytes]: diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index dca38418935b76..c68b9ce388466e 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -1352,14 +1352,11 @@ def test_multiline_list_comprehension(self): and x != 50)] """) compiled_code, _ = self.check_positions_against_ast(snippet) - compiled_code = compiled_code.co_consts[0] self.assertIsInstance(compiled_code, types.CodeType) self.assertOpcodeSourcePositionIs(compiled_code, 'LIST_APPEND', line=1, end_line=2, column=1, end_column=8, occurrence=1) self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', line=1, end_line=2, column=1, end_column=8, occurrence=1) - self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', - line=1, end_line=6, column=0, end_column=32, occurrence=1) def test_multiline_async_list_comprehension(self): snippet = textwrap.dedent("""\ @@ -1374,13 +1371,13 @@ async def f(): compiled_code, _ = self.check_positions_against_ast(snippet) g = {} eval(compiled_code, g) - compiled_code = g['f'].__code__.co_consts[1] + compiled_code = g['f'].__code__ self.assertIsInstance(compiled_code, types.CodeType) self.assertOpcodeSourcePositionIs(compiled_code, 'LIST_APPEND', line=2, end_line=3, column=5, end_column=12, occurrence=1) self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', line=2, end_line=3, column=5, end_column=12, occurrence=1) - self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_CONST', line=2, end_line=7, column=4, end_column=36, occurrence=1) def test_multiline_set_comprehension(self): @@ -1393,14 +1390,11 @@ def test_multiline_set_comprehension(self): and x != 50)} """) compiled_code, _ = self.check_positions_against_ast(snippet) - compiled_code = compiled_code.co_consts[0] self.assertIsInstance(compiled_code, types.CodeType) self.assertOpcodeSourcePositionIs(compiled_code, 'SET_ADD', line=1, end_line=2, column=1, end_column=8, occurrence=1) self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', line=1, end_line=2, column=1, end_column=8, occurrence=1) - self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', - line=1, end_line=6, column=0, end_column=32, occurrence=1) def test_multiline_async_set_comprehension(self): snippet = textwrap.dedent("""\ @@ -1415,13 +1409,13 @@ async def f(): compiled_code, _ = self.check_positions_against_ast(snippet) g = {} eval(compiled_code, g) - compiled_code = g['f'].__code__.co_consts[1] + compiled_code = g['f'].__code__ self.assertIsInstance(compiled_code, types.CodeType) self.assertOpcodeSourcePositionIs(compiled_code, 'SET_ADD', line=2, end_line=3, column=5, end_column=12, occurrence=1) 
self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', line=2, end_line=3, column=5, end_column=12, occurrence=1) - self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_CONST', line=2, end_line=7, column=4, end_column=36, occurrence=1) def test_multiline_dict_comprehension(self): @@ -1434,14 +1428,11 @@ def test_multiline_dict_comprehension(self): and x != 50)} """) compiled_code, _ = self.check_positions_against_ast(snippet) - compiled_code = compiled_code.co_consts[0] self.assertIsInstance(compiled_code, types.CodeType) self.assertOpcodeSourcePositionIs(compiled_code, 'MAP_ADD', line=1, end_line=2, column=1, end_column=7, occurrence=1) self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', line=1, end_line=2, column=1, end_column=7, occurrence=1) - self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', - line=1, end_line=6, column=0, end_column=32, occurrence=1) def test_multiline_async_dict_comprehension(self): snippet = textwrap.dedent("""\ @@ -1456,13 +1447,13 @@ async def f(): compiled_code, _ = self.check_positions_against_ast(snippet) g = {} eval(compiled_code, g) - compiled_code = g['f'].__code__.co_consts[1] + compiled_code = g['f'].__code__ self.assertIsInstance(compiled_code, types.CodeType) self.assertOpcodeSourcePositionIs(compiled_code, 'MAP_ADD', line=2, end_line=3, column=5, end_column=11, occurrence=1) self.assertOpcodeSourcePositionIs(compiled_code, 'JUMP_BACKWARD', line=2, end_line=3, column=5, end_column=11, occurrence=1) - self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_VALUE', + self.assertOpcodeSourcePositionIs(compiled_code, 'RETURN_CONST', line=2, end_line=7, column=4, end_column=36, occurrence=1) def test_matchcase_sequence(self): @@ -1711,9 +1702,6 @@ def test_column_offset_deduplication(self): for source in [ "lambda: a", "(a for b in c)", - "[a for b in c]", - "{a for b in c}", - "{a: b for c in d}", ]: with self.subTest(source): code = compile(f"{source}, {source}", "", "eval") diff --git a/Lib/test/test_compiler_assemble.py b/Lib/test/test_compiler_assemble.py new file mode 100644 index 00000000000000..3e2a127de728cd --- /dev/null +++ b/Lib/test/test_compiler_assemble.py @@ -0,0 +1,74 @@ + +import ast +import types + +from test.support.bytecode_helper import AssemblerTestCase + + +# Tests for the code-object creation stage of the compiler. 
+ +class IsolatedAssembleTests(AssemblerTestCase): + + def complete_metadata(self, metadata, filename="myfile.py"): + if metadata is None: + metadata = {} + for key in ['name', 'qualname']: + metadata.setdefault(key, key) + for key in ['consts']: + metadata.setdefault(key, []) + for key in ['names', 'varnames', 'cellvars', 'freevars', 'fasthidden']: + metadata.setdefault(key, {}) + for key in ['argcount', 'posonlyargcount', 'kwonlyargcount']: + metadata.setdefault(key, 0) + metadata.setdefault('firstlineno', 1) + metadata.setdefault('filename', filename) + return metadata + + def assemble_test(self, insts, metadata, expected): + metadata = self.complete_metadata(metadata) + insts = self.complete_insts_info(insts) + + co = self.get_code_object(metadata['filename'], insts, metadata) + self.assertIsInstance(co, types.CodeType) + + expected_metadata = {} + for key, value in metadata.items(): + if key == "fasthidden": + # not exposed on code object + continue + if isinstance(value, list): + expected_metadata[key] = tuple(value) + elif isinstance(value, dict): + expected_metadata[key] = tuple(value.keys()) + else: + expected_metadata[key] = value + + for key, value in expected_metadata.items(): + self.assertEqual(getattr(co, "co_" + key), value) + + f = types.FunctionType(co, {}) + for args, res in expected.items(): + self.assertEqual(f(*args), res) + + def test_simple_expr(self): + metadata = { + 'filename' : 'avg.py', + 'name' : 'avg', + 'qualname' : 'stats.avg', + 'consts' : {2 : 0}, + 'argcount' : 2, + 'varnames' : {'x' : 0, 'y' : 1}, + } + + # code for "return (x+y)/2" + insts = [ + ('RESUME', 0), + ('LOAD_FAST', 0, 1), # 'x' + ('LOAD_FAST', 1, 1), # 'y' + ('BINARY_OP', 0, 1), # '+' + ('LOAD_CONST', 0, 1), # 2 + ('BINARY_OP', 11, 1), # '/' + ('RETURN_VALUE', 1), + ] + expected = {(3, 4) : 3.5, (-100, 200) : 50, (10, 18) : 14} + self.assemble_test(insts, metadata, expected) diff --git a/Lib/test/test_compiler_codegen.py b/Lib/test/test_compiler_codegen.py index 022753e0c99483..ea57df9cd2400b 100644 --- a/Lib/test/test_compiler_codegen.py +++ b/Lib/test/test_compiler_codegen.py @@ -25,6 +25,8 @@ def test_if_expression(self): ('LOAD_CONST', 2, 1), exit_lbl, ('POP_TOP', None), + ('LOAD_CONST', 3), + ('RETURN_VALUE', None), ] self.codegen_test(snippet, expected) @@ -46,5 +48,7 @@ def test_for_loop(self): ('JUMP', loop_lbl), exit_lbl, ('END_FOR', None), + ('LOAD_CONST', 0), + ('RETURN_VALUE', None), ] self.codegen_test(snippet, expected) diff --git a/Lib/test/test_cprofile.py b/Lib/test/test_cprofile.py index 98648528bc81f2..484b8f8e3a365c 100644 --- a/Lib/test/test_cprofile.py +++ b/Lib/test/test_cprofile.py @@ -25,7 +25,6 @@ def test_bad_counter_during_dealloc(self): with support.catch_unraisable_exception() as cm: obj = _lsprof.Profiler(lambda: int) obj.enable() - obj = _lsprof.Profiler(1) obj.disable() obj.clear() @@ -37,10 +36,11 @@ def test_profile_enable_disable(self): self.addCleanup(prof.disable) prof.enable() - self.assertIs(sys.getprofile(), prof) + self.assertEqual( + sys.monitoring.get_tool(sys.monitoring.PROFILER_ID), "cProfile") prof.disable() - self.assertIs(sys.getprofile(), None) + self.assertIs(sys.monitoring.get_tool(sys.monitoring.PROFILER_ID), None) def test_profile_as_context_manager(self): prof = self.profilerclass() @@ -53,10 +53,19 @@ def test_profile_as_context_manager(self): # profile should be set as the global profiler inside the # with-block - self.assertIs(sys.getprofile(), prof) + self.assertEqual( + sys.monitoring.get_tool(sys.monitoring.PROFILER_ID), "cProfile") 
# profile shouldn't be set once we leave the with-block. - self.assertIs(sys.getprofile(), None) + self.assertIs(sys.monitoring.get_tool(sys.monitoring.PROFILER_ID), None) + + def test_second_profiler(self): + pr = self.profilerclass() + pr2 = self.profilerclass() + pr.enable() + self.assertRaises(ValueError, pr2.enable) + pr.disable() + class TestCommandLine(unittest.TestCase): def test_sort(self): diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index 8fb97bc0c1a1a7..de7ac97d72cb8e 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -10,7 +10,7 @@ import gc import pickle from test import support -from test.support import warnings_helper +from test.support import warnings_helper, import_helper, check_disallow_instantiation from itertools import permutations from textwrap import dedent from collections import OrderedDict @@ -1430,5 +1430,12 @@ def test_subclassable(self): # issue 44089 class Foo(csv.Error): ... + @support.cpython_only + def test_disallow_instantiation(self): + _csv = import_helper.import_module("_csv") + for tp in _csv.Reader, _csv.Writer: + with self.subTest(tp=tp): + check_disallow_instantiation(self, tp) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_dataclasses.py b/Lib/test/test_dataclasses.py index 7b48b26f9e7743..6669f1c57e2e78 100644 --- a/Lib/test/test_dataclasses.py +++ b/Lib/test/test_dataclasses.py @@ -3184,6 +3184,74 @@ def test_frozen_pickle(self): self.assertIsNot(obj, p) self.assertEqual(obj, p) + @dataclass(frozen=True, slots=True) + class FrozenSlotsGetStateClass: + foo: str + bar: int + + getstate_called: bool = field(default=False, compare=False) + + def __getstate__(self): + object.__setattr__(self, 'getstate_called', True) + return [self.foo, self.bar] + + @dataclass(frozen=True, slots=True) + class FrozenSlotsSetStateClass: + foo: str + bar: int + + setstate_called: bool = field(default=False, compare=False) + + def __setstate__(self, state): + object.__setattr__(self, 'setstate_called', True) + object.__setattr__(self, 'foo', state[0]) + object.__setattr__(self, 'bar', state[1]) + + @dataclass(frozen=True, slots=True) + class FrozenSlotsAllStateClass: + foo: str + bar: int + + getstate_called: bool = field(default=False, compare=False) + setstate_called: bool = field(default=False, compare=False) + + def __getstate__(self): + object.__setattr__(self, 'getstate_called', True) + return [self.foo, self.bar] + + def __setstate__(self, state): + object.__setattr__(self, 'setstate_called', True) + object.__setattr__(self, 'foo', state[0]) + object.__setattr__(self, 'bar', state[1]) + + def test_frozen_slots_pickle_custom_state(self): + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + obj = self.FrozenSlotsGetStateClass('a', 1) + dumped = pickle.dumps(obj, protocol=proto) + + self.assertTrue(obj.getstate_called) + self.assertEqual(obj, pickle.loads(dumped)) + + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + obj = self.FrozenSlotsSetStateClass('a', 1) + obj2 = pickle.loads(pickle.dumps(obj, protocol=proto)) + + self.assertTrue(obj2.setstate_called) + self.assertEqual(obj, obj2) + + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(proto=proto): + obj = self.FrozenSlotsAllStateClass('a', 1) + dumped = pickle.dumps(obj, protocol=proto) + + self.assertTrue(obj.getstate_called) + + obj2 = pickle.loads(dumped) + self.assertTrue(obj2.setstate_called) + self.assertEqual(obj, obj2) + def test_slots_with_default_no_init(self): # 
Originally reported in bpo-44649. @dataclass(slots=True) diff --git a/Lib/test/test_datetime.py b/Lib/test/test_datetime.py index 7f9094fa7bd4e6..3859733a4fe65b 100644 --- a/Lib/test/test_datetime.py +++ b/Lib/test/test_datetime.py @@ -8,10 +8,12 @@ def load_tests(loader, tests, pattern): try: - pure_tests = import_fresh_module(TESTS, fresh=['datetime', '_strptime'], - blocked=['_datetime']) - fast_tests = import_fresh_module(TESTS, fresh=['datetime', - '_datetime', '_strptime']) + pure_tests = import_fresh_module(TESTS, + fresh=['datetime', '_pydatetime', '_strptime'], + blocked=['_datetime']) + fast_tests = import_fresh_module(TESTS, + fresh=['datetime', '_strptime'], + blocked=['_pydatetime']) finally: # XXX: import_fresh_module() is supposed to leave sys.module cache untouched, # XXX: but it does not, so we have to cleanup ourselves. @@ -42,6 +44,8 @@ def setUpClass(cls_, module=module): cls_._save_sys_modules = sys.modules.copy() sys.modules[TESTS] = module sys.modules['datetime'] = module.datetime_module + if hasattr(module, '_pydatetime'): + sys.modules['_pydatetime'] = module._pydatetime sys.modules['_strptime'] = module._strptime @classmethod def tearDownClass(cls_): diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index 5262c5c257cb89..c90702a408eb33 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -154,7 +154,7 @@ def bug708901(): def bug1333982(x=[]): - assert 0, ([s for s in x] + + assert 0, ((s for s in x) + 1) pass @@ -162,7 +162,7 @@ def bug1333982(x=[]): %3d RESUME 0 %3d LOAD_ASSERTION_ERROR - LOAD_CONST 1 ( at 0x..., file "%s", line %d>) + LOAD_CONST 1 ( at 0x..., file "%s", line %d>) MAKE_FUNCTION 0 LOAD_FAST 0 (x) GET_ITER @@ -247,6 +247,35 @@ def wrap_func_w_kwargs(): """ % (wrap_func_w_kwargs.__code__.co_firstlineno, wrap_func_w_kwargs.__code__.co_firstlineno + 1) +dis_intrinsic_1_2 = """\ + 0 RESUME 0 + + 1 LOAD_CONST 0 (0) + LOAD_CONST 1 (('*',)) + IMPORT_NAME 0 (math) + CALL_INTRINSIC_1 2 (INTRINSIC_IMPORT_STAR) + POP_TOP + RETURN_CONST 2 (None) +""" + +dis_intrinsic_1_5 = """\ + 0 RESUME 0 + + 1 LOAD_NAME 0 (a) + CALL_INTRINSIC_1 5 (INTRINSIC_UNARY_POSITIVE) + RETURN_VALUE +""" + +dis_intrinsic_1_6 = """\ + 0 RESUME 0 + + 1 BUILD_LIST 0 + LOAD_NAME 0 (a) + LIST_EXTEND 1 + CALL_INTRINSIC_1 6 (INTRINSIC_LIST_TO_TUPLE) + RETURN_VALUE +""" + _BIG_LINENO_FORMAT = """\ 1 RESUME 0 @@ -549,7 +578,7 @@ async def _asyncwith(c): >> COPY 3 POP_EXCEPT RERAISE 1 - >> CALL_INTRINSIC_1 3 + >> CALL_INTRINSIC_1 3 (INTRINSIC_STOPITERATION_ERROR) RERAISE 1 ExceptionTable: 12 rows @@ -646,7 +675,7 @@ async def _co(x): def _h(y): def foo(x): '''funcdoc''' - return [x + z for z in y] + return list(x + z for z in y) return foo dis_nested_0 = """\ @@ -676,13 +705,15 @@ def foo(x): %3d RESUME 0 -%3d LOAD_CLOSURE 0 (x) +%3d LOAD_GLOBAL 1 (NULL + list) + LOAD_CLOSURE 0 (x) BUILD_TUPLE 1 - LOAD_CONST 1 ( at 0x..., file "%s", line %d>) + LOAD_CONST 1 ( at 0x..., file "%s", line %d>) MAKE_FUNCTION 8 (closure) LOAD_DEREF 1 (y) GET_ITER CALL 0 + CALL 1 RETURN_VALUE """ % (dis_nested_0, __file__, @@ -694,21 +725,28 @@ def foo(x): ) dis_nested_2 = """%s -Disassembly of at 0x..., file "%s", line %d>: +Disassembly of at 0x..., file "%s", line %d>: COPY_FREE_VARS 1 -%3d RESUME 0 - BUILD_LIST 0 +%3d RETURN_GENERATOR + POP_TOP + RESUME 0 LOAD_FAST 0 (.0) - >> FOR_ITER 7 (to 26) + >> FOR_ITER 9 (to 32) STORE_FAST 1 (z) LOAD_DEREF 2 (x) LOAD_FAST 1 (z) BINARY_OP 0 (+) - LIST_APPEND 2 - JUMP_BACKWARD 9 (to 8) + YIELD_VALUE 1 + RESUME 1 + POP_TOP + JUMP_BACKWARD 11 (to 10) >> END_FOR 
- RETURN_VALUE + RETURN_CONST 0 (None) + >> CALL_INTRINSIC_1 3 (INTRINSIC_STOPITERATION_ERROR) + RERAISE 1 +ExceptionTable: +1 row """ % (dis_nested_1, __file__, _h.__code__.co_firstlineno + 3, @@ -942,6 +980,16 @@ def test_kw_names(self): # Test that value is displayed for KW_NAMES self.do_disassembly_test(wrap_func_w_kwargs, dis_kw_names) + def test_intrinsic_1(self): + # Test that argrepr is displayed for CALL_INTRINSIC_1 + self.do_disassembly_test("from math import *", dis_intrinsic_1_2) + self.do_disassembly_test("+a", dis_intrinsic_1_5) + self.do_disassembly_test("(*a,)", dis_intrinsic_1_6) + + def test_intrinsic_2(self): + self.assertIn("CALL_INTRINSIC_2 1 (INTRINSIC_PREP_RERAISE_STAR)", + self.get_disassembly("try: pass\nexcept* Exception: x")) + def test_big_linenos(self): def func(count): namespace = {} diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py index 3491d4cdb1c18b..542fcdb5cf6f66 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest.py @@ -707,7 +707,7 @@ def non_Python_modules(): r""" >>> import builtins >>> tests = doctest.DocTestFinder().find(builtins) - >>> 830 < len(tests) < 850 # approximate number of objects with docstrings + >>> 830 < len(tests) < 860 # approximate number of objects with docstrings True >>> real_tests = [t for t in tests if len(t.examples) > 0] >>> len(real_tests) # objects that actually have doctests diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index c9691bbf304915..582392ecddcb91 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -1666,6 +1666,7 @@ def test_init_main_interpreter_settings(self): # All optional features should be enabled. 'feature_flags': OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS, + 'own_gil': True, } out, err = self.run_embedded_interpreter( 'test_init_main_interpreter_settings', diff --git a/Lib/test/test_epoll.py b/Lib/test/test_epoll.py index b623852f9eb4ee..c94946a6ae6b7c 100644 --- a/Lib/test/test_epoll.py +++ b/Lib/test/test_epoll.py @@ -27,6 +27,7 @@ import socket import time import unittest +from test import support if not hasattr(select, "epoll"): raise unittest.SkipTest("test works only on Linux 2.6") @@ -186,10 +187,16 @@ def test_control_and_wait(self): client.sendall(b"Hello!") server.sendall(b"world!!!") - now = time.monotonic() - events = ep.poll(1.0, 4) - then = time.monotonic() - self.assertFalse(then - now > 0.01) + # we might receive events one at a time, necessitating multiple calls to + # poll + events = [] + for _ in support.busy_retry(support.SHORT_TIMEOUT): + now = time.monotonic() + events += ep.poll(1.0, 4) + then = time.monotonic() + self.assertFalse(then - now > 0.01) + if len(events) >= 2: + break expected = [(client.fileno(), select.EPOLLIN | select.EPOLLOUT), (server.fileno(), select.EPOLLIN | select.EPOLLOUT)] diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py index 69b0d5f1c5a515..6fa49dbc0b730c 100644 --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -619,6 +619,8 @@ def test_specifier_z_error(self): error_msg = re.escape("unsupported format character 'z'") with self.assertRaisesRegex(ValueError, error_msg): "%z.1f" % 0 # not allowed in old style string interpolation + with self.assertRaisesRegex(ValueError, error_msg): + b"%z.1f" % 0 if __name__ == "__main__": diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index 5e94c99ae65af1..58e2550715cecf 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -561,11 +561,28 @@ def test_mismatched_parens(self): ]) 
self.assertRaises(SyntaxError, eval, "f'{" + "("*500 + "}'") + @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") def test_fstring_nested_too_deeply(self): self.assertAllRaise(SyntaxError, "f-string: expressions nested too deeply", ['f"{1+2:{1+2:{1+1:{1}}}}"']) + def create_nested_fstring(n): + if n == 0: + return "1+1" + prev = create_nested_fstring(n-1) + return f'f"{{{prev}}}"' + + self.assertAllRaise(SyntaxError, + "too many nested f-strings", + [create_nested_fstring(160)]) + + def test_syntax_error_in_nested_fstring(self): + # See gh-104016 for more information on this crash + self.assertAllRaise(SyntaxError, + "invalid syntax", + ['f"{1 1:' + ('{f"1:' * 199)]) + def test_double_braces(self): self.assertEqual(f'{{', '{') self.assertEqual(f'a{{', 'a{') @@ -963,11 +980,18 @@ def test_roundtrip_raw_quotes(self): self.assertEqual(fr'\"\'\"\'', '\\"\\\'\\"\\\'') def test_fstring_backslash_before_double_bracket(self): - self.assertEqual(f'\{{\}}', '\\{\\}') - self.assertEqual(f'\{{', '\\{') - self.assertEqual(f'\{{{1+1}', '\\{2') - self.assertEqual(f'\}}{1+1}', '\\}2') - self.assertEqual(f'{1+1}\}}', '2\\}') + deprecated_cases = [ + (r"f'\{{\}}'", '\\{\\}'), + (r"f'\{{'", '\\{'), + (r"f'\{{{1+1}'", '\\{2'), + (r"f'\}}{1+1}'", '\\}2'), + (r"f'{1+1}\}}'", '2\\}') + ] + for case, expected_result in deprecated_cases: + with self.subTest(case=case, expected_result=expected_result): + with self.assertWarns(DeprecationWarning): + result = eval(case) + self.assertEqual(result, expected_result) self.assertEqual(fr'\{{\}}', '\\{\\}') self.assertEqual(fr'\{{', '\\{') self.assertEqual(fr'\{{{1+1}', '\\{2') diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index cc782ea1ee5dff..31680b5a92e0f3 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -2141,11 +2141,10 @@ def printsolution(self, x): ... SyntaxError: 'yield' outside function -# Pegen does not produce this error message yet -# >>> def f(): x = yield = y -# Traceback (most recent call last): -# ... -# SyntaxError: assignment to yield expression not possible +>>> def f(): x = yield = y +Traceback (most recent call last): + ... 
+SyntaxError: assignment to yield expression not possible >>> def f(): (yield bar) = y Traceback (most recent call last): diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index b4f4e2b14351a6..4b1d355f550b49 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -2390,6 +2390,43 @@ def test_tunnel_debuglog(self): lines = output.getvalue().splitlines() self.assertIn('header: {}'.format(expected_header), lines) + def test_proxy_response_headers(self): + expected_header = ('X-Dummy', '1') + response_text = ( + 'HTTP/1.0 200 OK\r\n' + '{0}\r\n\r\n'.format(':'.join(expected_header)) + ) + + self.conn._create_connection = self._create_connection(response_text) + self.conn.set_tunnel('destination.com') + + self.conn.request('PUT', '/', '') + headers = self.conn._proxy_response_headers + self.assertIn(expected_header, headers.items()) + + def test_tunnel_leak(self): + sock = None + + def _create_connection(address, timeout=None, source_address=None): + nonlocal sock + sock = FakeSocket( + 'HTTP/1.1 404 NOT FOUND\r\n\r\n', + host=address[0], + port=address[1], + ) + return sock + + self.conn._create_connection = _create_connection + self.conn.set_tunnel('destination.com') + exc = None + try: + self.conn.request('HEAD', '/', '') + except OSError as e: + # keeping a reference to exc keeps response alive in the traceback + exc = e + self.assertIsNotNone(exc) + self.assertTrue(sock.file_closed) + if __name__ == '__main__': unittest.main(verbosity=2) diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py index cbcf94136ac4eb..0382b5ec448d57 100644 --- a/Lib/test/test_httpservers.py +++ b/Lib/test/test_httpservers.py @@ -418,6 +418,14 @@ def test_undecodable_filename(self): self.check_status_and_reason(response, HTTPStatus.OK, data=os_helper.TESTFN_UNDECODABLE) + def test_undecodable_parameter(self): + # sanity check using a valid parameter + response = self.request(self.base_url + '/?x=123').read() + self.assertRegex(response, f'listing for {self.base_url}/\?x=123'.encode('latin1')) + # now the bogus encoding + response = self.request(self.base_url + '/?x=%bb').read() + self.assertRegex(response, f'listing for {self.base_url}/\?x=\xef\xbf\xbd'.encode('latin1')) + def test_get_dir_redirect_location_domain_injection_bug(self): """Ensure //evil.co/..%2f../../X does not put //evil.co/ in Location. 
diff --git a/Lib/test/test_idle.py b/Lib/test/test_idle.py index b94b18a541a701..90cff9002b75b2 100644 --- a/Lib/test/test_idle.py +++ b/Lib/test/test_idle.py @@ -3,7 +3,7 @@ from test.support import check_sanitizer if check_sanitizer(address=True, memory=True): - raise unittest.SkipTest("Tests involvin libX11 can SEGFAULT on ASAN/MSAN builds") + raise unittest.SkipTest("Tests involving libX11 can SEGFAULT on ASAN/MSAN builds") # Skip test_idle if _tkinter wasn't built, if tkinter is missing, # if tcl/tk is not the 8.5+ needed for ttk widgets, diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index 41dfdaabe24664..e2384a08ecaa90 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -1640,6 +1640,7 @@ class SubinterpImportTests(unittest.TestCase): ) ISOLATED = dict( use_main_obmalloc=False, + own_gil=True, ) NOT_ISOLATED = {k: not v for k, v in ISOLATED.items()} @@ -1652,26 +1653,44 @@ def pipe(self): os.set_blocking(r, False) return (r, w) - def import_script(self, name, fd, check_override=None): + def import_script(self, name, fd, filename=None, check_override=None): override_text = '' if check_override is not None: override_text = f''' - import _imp - _imp._override_multi_interp_extensions_check({check_override}) - ''' - return textwrap.dedent(f''' - import os, sys - {override_text} - try: - import {name} - except ImportError as exc: - text = 'ImportError: ' + str(exc) - else: - text = 'okay' - os.write({fd}, text.encode('utf-8')) - ''') + import _imp + _imp._override_multi_interp_extensions_check({check_override}) + ''' + if filename: + return textwrap.dedent(f''' + from importlib.util import spec_from_loader, module_from_spec + from importlib.machinery import ExtensionFileLoader + import os, sys + {override_text} + loader = ExtensionFileLoader({name!r}, {filename!r}) + spec = spec_from_loader({name!r}, loader) + try: + module = module_from_spec(spec) + loader.exec_module(module) + except ImportError as exc: + text = 'ImportError: ' + str(exc) + else: + text = 'okay' + os.write({fd}, text.encode('utf-8')) + ''') + else: + return textwrap.dedent(f''' + import os, sys + {override_text} + try: + import {name} + except ImportError as exc: + text = 'ImportError: ' + str(exc) + else: + text = 'okay' + os.write({fd}, text.encode('utf-8')) + ''') - def run_here(self, name, *, + def run_here(self, name, filename=None, *, check_singlephase_setting=False, check_singlephase_override=None, isolated=False, @@ -1700,26 +1719,30 @@ def run_here(self, name, *, ) r, w = self.pipe() - script = self.import_script(name, w, check_singlephase_override) + script = self.import_script(name, w, filename, + check_singlephase_override) ret = run_in_subinterp_with_config(script, **kwargs) self.assertEqual(ret, 0) return os.read(r, 100) - def check_compatible_here(self, name, *, strict=False, isolated=False): + def check_compatible_here(self, name, filename=None, *, + strict=False, + isolated=False, + ): # Verify that the named module may be imported in a subinterpreter. # (See run_here() for more info.) 
- out = self.run_here(name, + out = self.run_here(name, filename, check_singlephase_setting=strict, isolated=isolated, ) self.assertEqual(out, b'okay') - def check_incompatible_here(self, name, *, isolated=False): + def check_incompatible_here(self, name, filename=None, *, isolated=False): # Differences from check_compatible_here(): # * verify that import fails # * "strict" is always True - out = self.run_here(name, + out = self.run_here(name, filename, check_singlephase_setting=True, isolated=isolated, ) @@ -1820,6 +1843,44 @@ def test_multi_init_extension_compat(self): with self.subTest(f'{module}: strict, fresh'): self.check_compatible_fresh(module, strict=True) + @unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module") + def test_multi_init_extension_non_isolated_compat(self): + modname = '_test_non_isolated' + filename = _testmultiphase.__file__ + loader = ExtensionFileLoader(modname, filename) + spec = importlib.util.spec_from_loader(modname, loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + sys.modules[modname] = module + + require_extension(module) + with self.subTest(f'{modname}: isolated'): + self.check_incompatible_here(modname, filename, isolated=True) + with self.subTest(f'{modname}: not isolated'): + self.check_incompatible_here(modname, filename, isolated=False) + with self.subTest(f'{modname}: not strict'): + self.check_compatible_here(modname, filename, strict=False) + + @unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module") + def test_multi_init_extension_per_interpreter_gil_compat(self): + modname = '_test_shared_gil_only' + filename = _testmultiphase.__file__ + loader = ExtensionFileLoader(modname, filename) + spec = importlib.util.spec_from_loader(modname, loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + sys.modules[modname] = module + + require_extension(module) + with self.subTest(f'{modname}: isolated, strict'): + self.check_incompatible_here(modname, filename, isolated=True) + with self.subTest(f'{modname}: not isolated, strict'): + self.check_compatible_here(modname, filename, + strict=True, isolated=False) + with self.subTest(f'{modname}: not isolated, not strict'): + self.check_compatible_here(modname, filename, + strict=False, isolated=False) + def test_python_compat(self): module = 'threading' require_pure_python(module) diff --git a/Lib/test/test_importlib/builtin/test_finder.py b/Lib/test/test_importlib/builtin/test_finder.py index 81dc5a3699d952..111c4af1ea7cfe 100644 --- a/Lib/test/test_importlib/builtin/test_finder.py +++ b/Lib/test/test_importlib/builtin/test_finder.py @@ -43,38 +43,5 @@ def test_failure(self): ) = util.test_both(FindSpecTests, machinery=machinery) -@unittest.skipIf(util.BUILTINS.good_name is None, 'no reasonable builtin module') -class FinderTests(abc.FinderTests): - - """Test find_module() for built-in modules.""" - - def test_module(self): - # Common case. - with util.uncache(util.BUILTINS.good_name): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - found = self.machinery.BuiltinImporter.find_module(util.BUILTINS.good_name) - self.assertTrue(found) - self.assertTrue(hasattr(found, 'load_module')) - - # Built-in modules cannot be a package. - test_package = test_package_in_package = test_package_over_module = None - - # Built-in modules cannot be in a package. 
- test_module_in_package = None - - def test_failure(self): - assert 'importlib' not in sys.builtin_module_names - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - loader = self.machinery.BuiltinImporter.find_module('importlib') - self.assertIsNone(loader) - - -(Frozen_FinderTests, - Source_FinderTests - ) = util.test_both(FinderTests, machinery=machinery) - - if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/extension/test_loader.py b/Lib/test/test_importlib/extension/test_loader.py index 3bf2bbdcdcc4e6..3a74b821eaee49 100644 --- a/Lib/test/test_importlib/extension/test_loader.py +++ b/Lib/test/test_importlib/extension/test_loader.py @@ -348,6 +348,8 @@ def test_bad_modules(self): 'exec_err', 'exec_raise', 'exec_unreported_exception', + 'multiple_create_slots', + 'multiple_multiple_interpreters_slots', ]: with self.subTest(name_base): name = self.name + '_' + name_base diff --git a/Lib/test/test_importlib/extension/test_path_hook.py b/Lib/test/test_importlib/extension/test_path_hook.py index a0adc70ad1ec4d..ec9644dc520534 100644 --- a/Lib/test/test_importlib/extension/test_path_hook.py +++ b/Lib/test/test_importlib/extension/test_path_hook.py @@ -19,7 +19,7 @@ def hook(self, entry): def test_success(self): # Path hook should handle a directory where a known extension module # exists. - self.assertTrue(hasattr(self.hook(util.EXTENSIONS.path), 'find_module')) + self.assertTrue(hasattr(self.hook(util.EXTENSIONS.path), 'find_spec')) (Frozen_PathHooksTests, diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py index a364a977bce781..73e5da2ba92279 100644 --- a/Lib/test/test_importlib/fixtures.py +++ b/Lib/test/test_importlib/fixtures.py @@ -350,11 +350,6 @@ def DALS(str): return textwrap.dedent(str).lstrip() -class NullFinder: - def find_module(self, name): - pass - - @requires_zlib() class ZipFixtures: root = 'test.test_importlib.data' diff --git a/Lib/test/test_importlib/frozen/test_finder.py b/Lib/test/test_importlib/frozen/test_finder.py index 069755606b40af..469dcdbd09eaf7 100644 --- a/Lib/test/test_importlib/frozen/test_finder.py +++ b/Lib/test/test_importlib/frozen/test_finder.py @@ -182,45 +182,5 @@ def test_not_using_frozen(self): ) = util.test_both(FindSpecTests, machinery=machinery) -class FinderTests(abc.FinderTests): - - """Test finding frozen modules.""" - - def find(self, name, path=None): - finder = self.machinery.FrozenImporter - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - with import_helper.frozen_modules(): - return finder.find_module(name, path) - - def test_module(self): - name = '__hello__' - loader = self.find(name) - self.assertTrue(hasattr(loader, 'load_module')) - - def test_package(self): - loader = self.find('__phello__') - self.assertTrue(hasattr(loader, 'load_module')) - - def test_module_in_package(self): - loader = self.find('__phello__.spam', ['__phello__']) - self.assertTrue(hasattr(loader, 'load_module')) - - # No frozen package within another package to test with. - test_package_in_package = None - - # No easy way to test. 
- test_package_over_module = None - - def test_failure(self): - loader = self.find('') - self.assertIsNone(loader) - - -(Frozen_FinderTests, - Source_FinderTests - ) = util.test_both(FinderTests, machinery=machinery) - - if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/frozen/test_loader.py b/Lib/test/test_importlib/frozen/test_loader.py index da1569e3d0681e..4f1af454b52c71 100644 --- a/Lib/test/test_importlib/frozen/test_loader.py +++ b/Lib/test/test_importlib/frozen/test_loader.py @@ -125,88 +125,6 @@ def test_unloadable(self): ) = util.test_both(ExecModuleTests, machinery=machinery) -class LoaderTests(abc.LoaderTests): - - def load_module(self, name): - with fresh(name, oldapi=True): - module = self.machinery.FrozenImporter.load_module(name) - with captured_stdout() as stdout: - module.main() - return module, stdout - - def test_module(self): - module, stdout = self.load_module('__hello__') - filename = resolve_stdlib_file('__hello__') - check = {'__name__': '__hello__', - '__package__': '', - '__loader__': self.machinery.FrozenImporter, - '__file__': filename, - } - for attr, value in check.items(): - self.assertEqual(getattr(module, attr, None), value) - self.assertEqual(stdout.getvalue(), 'Hello world!\n') - - def test_package(self): - module, stdout = self.load_module('__phello__') - filename = resolve_stdlib_file('__phello__', ispkg=True) - pkgdir = os.path.dirname(filename) - check = {'__name__': '__phello__', - '__package__': '__phello__', - '__path__': [pkgdir], - '__loader__': self.machinery.FrozenImporter, - '__file__': filename, - } - for attr, value in check.items(): - attr_value = getattr(module, attr, None) - self.assertEqual(attr_value, value, - "for __phello__.%s, %r != %r" % - (attr, attr_value, value)) - self.assertEqual(stdout.getvalue(), 'Hello world!\n') - - def test_lacking_parent(self): - with util.uncache('__phello__'): - module, stdout = self.load_module('__phello__.spam') - filename = resolve_stdlib_file('__phello__.spam') - check = {'__name__': '__phello__.spam', - '__package__': '__phello__', - '__loader__': self.machinery.FrozenImporter, - '__file__': filename, - } - for attr, value in check.items(): - attr_value = getattr(module, attr) - self.assertEqual(attr_value, value, - "for __phello__.spam.%s, %r != %r" % - (attr, attr_value, value)) - self.assertEqual(stdout.getvalue(), 'Hello world!\n') - - def test_module_reuse(self): - with fresh('__hello__', oldapi=True): - module1 = self.machinery.FrozenImporter.load_module('__hello__') - module2 = self.machinery.FrozenImporter.load_module('__hello__') - with captured_stdout() as stdout: - module1.main() - module2.main() - self.assertIs(module1, module2) - self.assertEqual(stdout.getvalue(), - 'Hello world!\nHello world!\n') - - # No way to trigger an error in a frozen module. 
- test_state_after_failure = None - - def test_unloadable(self): - with import_helper.frozen_modules(): - with deprecated(): - assert self.machinery.FrozenImporter.find_module('_not_real') is None - with self.assertRaises(ImportError) as cm: - self.load_module('_not_real') - self.assertEqual(cm.exception.name, '_not_real') - - -(Frozen_LoaderTests, - Source_LoaderTests - ) = util.test_both(LoaderTests, machinery=machinery) - - class InspectLoaderTests: """Tests for the InspectLoader methods for FrozenImporter.""" diff --git a/Lib/test/test_importlib/import_/test___loader__.py b/Lib/test/test_importlib/import_/test___loader__.py index eaf665a6f5b5af..a14163919af677 100644 --- a/Lib/test/test_importlib/import_/test___loader__.py +++ b/Lib/test/test_importlib/import_/test___loader__.py @@ -33,48 +33,5 @@ def test___loader__(self): ) = util.test_both(SpecLoaderAttributeTests, __import__=util.__import__) -class LoaderMock: - - def find_module(self, fullname, path=None): - return self - - def load_module(self, fullname): - sys.modules[fullname] = self.module - return self.module - - -class LoaderAttributeTests: - - def test___loader___missing(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - module = types.ModuleType('blah') - try: - del module.__loader__ - except AttributeError: - pass - loader = LoaderMock() - loader.module = module - with util.uncache('blah'), util.import_state(meta_path=[loader]): - module = self.__import__('blah') - self.assertEqual(loader, module.__loader__) - - def test___loader___is_None(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - module = types.ModuleType('blah') - module.__loader__ = None - loader = LoaderMock() - loader.module = module - with util.uncache('blah'), util.import_state(meta_path=[loader]): - returned_module = self.__import__('blah') - self.assertEqual(loader, module.__loader__) - - -(Frozen_Tests, - Source_Tests - ) = util.test_both(LoaderAttributeTests, __import__=util.__import__) - - if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/import_/test___package__.py b/Lib/test/test_importlib/import_/test___package__.py index ab1b35ee3c1a4e..7130c99a6fc171 100644 --- a/Lib/test/test_importlib/import_/test___package__.py +++ b/Lib/test/test_importlib/import_/test___package__.py @@ -95,25 +95,6 @@ def __init__(self, parent): self.parent = parent -class Using__package__PEP302(Using__package__): - mock_modules = util.mock_modules - - def test_using___package__(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - super().test_using___package__() - - def test_spec_fallback(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - super().test_spec_fallback() - - -(Frozen_UsingPackagePEP302, - Source_UsingPackagePEP302 - ) = util.test_both(Using__package__PEP302, __import__=util.__import__) - - class Using__package__PEP451(Using__package__): mock_modules = util.mock_spec @@ -162,23 +143,6 @@ def test_submodule(self): module = getattr(pkg, 'mod') self.assertEqual(module.__package__, 'pkg') -class Setting__package__PEP302(Setting__package__, unittest.TestCase): - mock_modules = util.mock_modules - - def test_top_level(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - super().test_top_level() - - def test_package(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - super().test_package() - - def 
test_submodule(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - super().test_submodule() class Setting__package__PEP451(Setting__package__, unittest.TestCase): mock_modules = util.mock_spec diff --git a/Lib/test/test_importlib/import_/test_api.py b/Lib/test/test_importlib/import_/test_api.py index 0ee032b0206df9..d6ad590b3d46a0 100644 --- a/Lib/test/test_importlib/import_/test_api.py +++ b/Lib/test/test_importlib/import_/test_api.py @@ -28,11 +28,6 @@ def exec_module(module): class BadLoaderFinder: - @classmethod - def find_module(cls, fullname, path): - if fullname == SUBMOD_NAME: - return cls - @classmethod def load_module(cls, fullname): if fullname == SUBMOD_NAME: diff --git a/Lib/test/test_importlib/import_/test_caching.py b/Lib/test/test_importlib/import_/test_caching.py index 3ca765fb4ada97..aedf0fd4f9db02 100644 --- a/Lib/test/test_importlib/import_/test_caching.py +++ b/Lib/test/test_importlib/import_/test_caching.py @@ -52,12 +52,11 @@ class ImportlibUseCache(UseCache, unittest.TestCase): __import__ = util.__import__['Source'] def create_mock(self, *names, return_=None): - mock = util.mock_modules(*names) - original_load = mock.load_module - def load_module(self, fullname): - original_load(fullname) - return return_ - mock.load_module = MethodType(load_module, mock) + mock = util.mock_spec(*names) + original_spec = mock.find_spec + def find_spec(self, fullname, path, target=None): + return original_spec(fullname) + mock.find_spec = MethodType(find_spec, mock) return mock # __import__ inconsistent between loaders and built-in import when it comes @@ -86,14 +85,12 @@ def test_using_cache_for_assigning_to_attribute(self): # See test_using_cache_after_loader() for reasoning. def test_using_cache_for_fromlist(self): # [from cache for fromlist] - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - with self.create_mock('pkg.__init__', 'pkg.module') as importer: - with util.import_state(meta_path=[importer]): - module = self.__import__('pkg', fromlist=['module']) - self.assertTrue(hasattr(module, 'module')) - self.assertEqual(id(module.module), - id(sys.modules['pkg.module'])) + with self.create_mock('pkg.__init__', 'pkg.module') as importer: + with util.import_state(meta_path=[importer]): + module = self.__import__('pkg', fromlist=['module']) + self.assertTrue(hasattr(module, 'module')) + self.assertEqual(id(module.module), + id(sys.modules['pkg.module'])) if __name__ == '__main__': diff --git a/Lib/test/test_importlib/import_/test_meta_path.py b/Lib/test/test_importlib/import_/test_meta_path.py index c8b898ec237850..8689017ba43112 100644 --- a/Lib/test/test_importlib/import_/test_meta_path.py +++ b/Lib/test/test_importlib/import_/test_meta_path.py @@ -113,16 +113,6 @@ def test_with_path(self): super().test_no_path() -class CallSignaturePEP302(CallSignoreSuppressImportWarning): - mock_modules = util.mock_modules - finder_name = 'find_module' - - -(Frozen_CallSignaturePEP302, - Source_CallSignaturePEP302 - ) = util.test_both(CallSignaturePEP302, __import__=util.__import__) - - class CallSignaturePEP451(CallSignature): mock_modules = util.mock_spec finder_name = 'find_spec' diff --git a/Lib/test/test_importlib/import_/test_path.py b/Lib/test/test_importlib/import_/test_path.py index de620842bbc52b..89b52fbd1e1aff 100644 --- a/Lib/test/test_importlib/import_/test_path.py +++ b/Lib/test/test_importlib/import_/test_path.py @@ -116,46 +116,6 @@ def test_None_on_sys_path(self): if email is not missing: 
sys.modules['email'] = email - def test_finder_with_find_module(self): - class TestFinder: - def find_module(self, fullname): - return self.to_return - failing_finder = TestFinder() - failing_finder.to_return = None - path = 'testing path' - with util.import_state(path_importer_cache={path: failing_finder}): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - self.assertIsNone( - self.machinery.PathFinder.find_spec('whatever', [path])) - success_finder = TestFinder() - success_finder.to_return = __loader__ - with util.import_state(path_importer_cache={path: success_finder}): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - spec = self.machinery.PathFinder.find_spec('whatever', [path]) - self.assertEqual(spec.loader, __loader__) - - def test_finder_with_find_loader(self): - class TestFinder: - loader = None - portions = [] - def find_loader(self, fullname): - return self.loader, self.portions - path = 'testing path' - with util.import_state(path_importer_cache={path: TestFinder()}): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - self.assertIsNone( - self.machinery.PathFinder.find_spec('whatever', [path])) - success_finder = TestFinder() - success_finder.loader = __loader__ - with util.import_state(path_importer_cache={path: success_finder}): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - spec = self.machinery.PathFinder.find_spec('whatever', [path]) - self.assertEqual(spec.loader, __loader__) - def test_finder_with_find_spec(self): class TestFinder: spec = None @@ -228,9 +188,9 @@ def invalidate_caches(self): class FindModuleTests(FinderTests): def find(self, *args, **kwargs): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - return self.machinery.PathFinder.find_module(*args, **kwargs) + spec = self.machinery.PathFinder.find_spec(*args, **kwargs) + return None if spec is None else spec.loader + def check_found(self, found, importer): self.assertIs(found, importer) @@ -255,16 +215,14 @@ def check_found(self, found, importer): class PathEntryFinderTests: def test_finder_with_failing_find_spec(self): - # PathEntryFinder with find_module() defined should work. - # Issue #20763. class Finder: - path_location = 'test_finder_with_find_module' + path_location = 'test_finder_with_find_spec' def __init__(self, path): if path != self.path_location: raise ImportError @staticmethod - def find_module(fullname): + def find_spec(fullname, target=None): return None @@ -274,27 +232,6 @@ def find_module(fullname): warnings.simplefilter("ignore", ImportWarning) self.machinery.PathFinder.find_spec('importlib') - def test_finder_with_failing_find_module(self): - # PathEntryFinder with find_module() defined should work. - # Issue #20763. 
- class Finder: - path_location = 'test_finder_with_find_module' - def __init__(self, path): - if path != self.path_location: - raise ImportError - - @staticmethod - def find_module(fullname): - return None - - - with util.import_state(path=[Finder.path_location]+sys.path[:], - path_hooks=[Finder]): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", ImportWarning) - warnings.simplefilter("ignore", DeprecationWarning) - self.machinery.PathFinder.find_module('importlib') - (Frozen_PEFTests, Source_PEFTests diff --git a/Lib/test/test_importlib/source/test_case_sensitivity.py b/Lib/test/test_importlib/source/test_case_sensitivity.py index 9d472707abe840..6a06313319dbcd 100644 --- a/Lib/test/test_importlib/source/test_case_sensitivity.py +++ b/Lib/test/test_importlib/source/test_case_sensitivity.py @@ -63,19 +63,6 @@ def test_insensitive(self): self.assertIn(self.name, insensitive.get_filename(self.name)) -class CaseSensitivityTestPEP302(CaseSensitivityTest): - def find(self, finder): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - return finder.find_module(self.name) - - -(Frozen_CaseSensitivityTestPEP302, - Source_CaseSensitivityTestPEP302 - ) = util.test_both(CaseSensitivityTestPEP302, importlib=importlib, - machinery=machinery) - - class CaseSensitivityTestPEP451(CaseSensitivityTest): def find(self, finder): found = finder.find_spec(self.name) diff --git a/Lib/test/test_importlib/source/test_finder.py b/Lib/test/test_importlib/source/test_finder.py index bed9d56dca84ee..12db7c7d352a2f 100644 --- a/Lib/test/test_importlib/source/test_finder.py +++ b/Lib/test/test_importlib/source/test_finder.py @@ -120,7 +120,7 @@ def test_package_over_module(self): def test_failure(self): with util.create_modules('blah') as mapping: nothing = self.import_(mapping['.root'], 'sdfsadsadf') - self.assertIsNone(nothing) + self.assertEqual(nothing, self.NOT_FOUND) def test_empty_string_for_dir(self): # The empty string from sys.path means to search in the cwd. 
@@ -150,7 +150,7 @@ def test_dir_removal_handling(self): found = self._find(finder, 'mod', loader_only=True) self.assertIsNotNone(found) found = self._find(finder, 'mod', loader_only=True) - self.assertIsNone(found) + self.assertEqual(found, self.NOT_FOUND) @unittest.skipUnless(sys.platform != 'win32', 'os.chmod() does not support the needed arguments under Windows') @@ -196,10 +196,12 @@ class FinderTestsPEP420(FinderTests): NOT_FOUND = (None, []) def _find(self, finder, name, loader_only=False): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - loader_portions = finder.find_loader(name) - return loader_portions[0] if loader_only else loader_portions + spec = finder.find_spec(name) + if spec is None: + return self.NOT_FOUND + if loader_only: + return spec.loader + return spec.loader, spec.submodule_search_locations (Frozen_FinderTestsPEP420, @@ -207,20 +209,5 @@ def _find(self, finder, name, loader_only=False): ) = util.test_both(FinderTestsPEP420, machinery=machinery) -class FinderTestsPEP302(FinderTests): - - NOT_FOUND = None - - def _find(self, finder, name, loader_only=False): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - return finder.find_module(name) - - -(Frozen_FinderTestsPEP302, - Source_FinderTestsPEP302 - ) = util.test_both(FinderTestsPEP302, machinery=machinery) - - if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/source/test_path_hook.py b/Lib/test/test_importlib/source/test_path_hook.py index ead62f5e945e2a..f274330e0b333b 100644 --- a/Lib/test/test_importlib/source/test_path_hook.py +++ b/Lib/test/test_importlib/source/test_path_hook.py @@ -18,19 +18,10 @@ def test_success(self): self.assertTrue(hasattr(self.path_hook()(mapping['.root']), 'find_spec')) - def test_success_legacy(self): - with util.create_modules('dummy') as mapping: - self.assertTrue(hasattr(self.path_hook()(mapping['.root']), - 'find_module')) - def test_empty_string(self): # The empty string represents the cwd. self.assertTrue(hasattr(self.path_hook()(''), 'find_spec')) - def test_empty_string_legacy(self): - # The empty string represents the cwd. - self.assertTrue(hasattr(self.path_hook()(''), 'find_module')) - (Frozen_PathHookTest, Source_PathHooktest diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py index 3c9149c4e45a92..603125f6d926f6 100644 --- a/Lib/test/test_importlib/test_abc.py +++ b/Lib/test/test_importlib/test_abc.py @@ -147,20 +147,13 @@ def ins(self): class MetaPathFinder: - def find_module(self, fullname, path): - return super().find_module(fullname, path) + pass class MetaPathFinderDefaultsTests(ABCTestHarness): SPLIT = make_abc_subclasses(MetaPathFinder) - def test_find_module(self): - # Default should return None. - with self.assertWarns(DeprecationWarning): - found = self.ins.find_module('something', None) - self.assertIsNone(found) - def test_invalidate_caches(self): # Calling the method is a no-op. 
self.ins.invalidate_caches() @@ -173,22 +166,13 @@ def test_invalidate_caches(self): class PathEntryFinder: - def find_loader(self, fullname): - return super().find_loader(fullname) + pass class PathEntryFinderDefaultsTests(ABCTestHarness): SPLIT = make_abc_subclasses(PathEntryFinder) - def test_find_loader(self): - with self.assertWarns(DeprecationWarning): - found = self.ins.find_loader('something') - self.assertEqual(found, (None, [])) - - def find_module(self): - self.assertEqual(None, self.ins.find_module('something')) - def test_invalidate_caches(self): # Should be a no-op. self.ins.invalidate_caches() @@ -201,8 +185,7 @@ def test_invalidate_caches(self): class Loader: - def load_module(self, fullname): - return super().load_module(fullname) + pass class LoaderDefaultsTests(ABCTestHarness): @@ -333,14 +316,6 @@ def find_spec(self, fullname, path, target=None): return MetaPathSpecFinder() - def test_find_module(self): - finder = self.finder(None) - path = ['a', 'b', 'c'] - name = 'blah' - with self.assertWarns(DeprecationWarning): - found = finder.find_module(name, path) - self.assertIsNone(found) - def test_find_spec_with_explicit_target(self): loader = object() spec = self.util.spec_from_loader('blah', loader) @@ -370,53 +345,6 @@ def test_spec(self): ) = test_util.test_both(MetaPathFinderFindModuleTests, abc=abc, util=util) -##### PathEntryFinder concrete methods ######################################### -class PathEntryFinderFindLoaderTests: - - @classmethod - def finder(cls, spec): - class PathEntrySpecFinder(cls.abc.PathEntryFinder): - - def find_spec(self, fullname, target=None): - self.called_for = fullname - return spec - - return PathEntrySpecFinder() - - def test_no_spec(self): - finder = self.finder(None) - name = 'blah' - with self.assertWarns(DeprecationWarning): - found = finder.find_loader(name) - self.assertIsNone(found[0]) - self.assertEqual([], found[1]) - self.assertEqual(name, finder.called_for) - - def test_spec_with_loader(self): - loader = object() - spec = self.util.spec_from_loader('blah', loader) - finder = self.finder(spec) - with self.assertWarns(DeprecationWarning): - found = finder.find_loader('blah') - self.assertIs(found[0], spec.loader) - - def test_spec_with_portions(self): - spec = self.machinery.ModuleSpec('blah', None) - paths = ['a', 'b', 'c'] - spec.submodule_search_locations = paths - finder = self.finder(spec) - with self.assertWarns(DeprecationWarning): - found = finder.find_loader('blah') - self.assertIsNone(found[0]) - self.assertEqual(paths, found[1]) - - -(Frozen_PEFFindLoaderTests, - Source_PEFFindLoaderTests - ) = test_util.test_both(PathEntryFinderFindLoaderTests, abc=abc, util=util, - machinery=machinery) - - ##### Loader concrete methods ################################################## class LoaderLoadModuleTests: diff --git a/Lib/test/test_importlib/test_api.py b/Lib/test/test_importlib/test_api.py index b3a99dc2dd5731..ecf2c47c462e23 100644 --- a/Lib/test/test_importlib/test_api.py +++ b/Lib/test/test_importlib/test_api.py @@ -95,7 +95,8 @@ def load_b(): (Frozen_ImportModuleTests, Source_ImportModuleTests - ) = test_util.test_both(ImportModuleTests, init=init) + ) = test_util.test_both( + ImportModuleTests, init=init, util=util, machinery=machinery) class FindLoaderTests: @@ -103,29 +104,26 @@ class FindLoaderTests: FakeMetaFinder = None def test_sys_modules(self): - # If a module with __loader__ is in sys.modules, then return it. + # If a module with __spec__.loader is in sys.modules, then return it. 
name = 'some_mod' with test_util.uncache(name): module = types.ModuleType(name) loader = 'a loader!' - module.__loader__ = loader + module.__spec__ = self.machinery.ModuleSpec(name, loader) sys.modules[name] = module - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - found = self.init.find_loader(name) - self.assertEqual(loader, found) + spec = self.util.find_spec(name) + self.assertIsNotNone(spec) + self.assertEqual(spec.loader, loader) def test_sys_modules_loader_is_None(self): - # If sys.modules[name].__loader__ is None, raise ValueError. + # If sys.modules[name].__spec__.loader is None, raise ValueError. name = 'some_mod' with test_util.uncache(name): module = types.ModuleType(name) module.__loader__ = None sys.modules[name] = module with self.assertRaises(ValueError): - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - self.init.find_loader(name) + self.util.find_spec(name) def test_sys_modules_loader_is_not_set(self): # Should raise ValueError @@ -134,24 +132,20 @@ def test_sys_modules_loader_is_not_set(self): with test_util.uncache(name): module = types.ModuleType(name) try: - del module.__loader__ + del module.__spec__.loader except AttributeError: pass sys.modules[name] = module with self.assertRaises(ValueError): - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - self.init.find_loader(name) + self.util.find_spec(name) def test_success(self): # Return the loader found on sys.meta_path. name = 'some_mod' with test_util.uncache(name): with test_util.import_state(meta_path=[self.FakeMetaFinder]): - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - warnings.simplefilter('ignore', ImportWarning) - self.assertEqual((name, None), self.init.find_loader(name)) + spec = self.util.find_spec(name) + self.assertEqual((name, (name, None)), (spec.name, spec.loader)) def test_success_path(self): # Searching on a path should work. @@ -159,17 +153,12 @@ def test_success_path(self): path = 'path to some place' with test_util.uncache(name): with test_util.import_state(meta_path=[self.FakeMetaFinder]): - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - warnings.simplefilter('ignore', ImportWarning) - self.assertEqual((name, path), - self.init.find_loader(name, path)) + spec = self.util.find_spec(name, path) + self.assertEqual(name, spec.name) def test_nothing(self): # None is returned upon failure to find a loader. 
- with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - self.assertIsNone(self.init.find_loader('nevergoingtofindthismodule')) + self.assertIsNone(self.util.find_spec('nevergoingtofindthismodule')) class FindLoaderPEP451Tests(FindLoaderTests): @@ -182,20 +171,8 @@ def find_spec(name, path=None, target=None): (Frozen_FindLoaderPEP451Tests, Source_FindLoaderPEP451Tests - ) = test_util.test_both(FindLoaderPEP451Tests, init=init) - - -class FindLoaderPEP302Tests(FindLoaderTests): - - class FakeMetaFinder: - @staticmethod - def find_module(name, path=None): - return name, path - - -(Frozen_FindLoaderPEP302Tests, - Source_FindLoaderPEP302Tests - ) = test_util.test_both(FindLoaderPEP302Tests, init=init) + ) = test_util.test_both( + FindLoaderPEP451Tests, init=init, util=util, machinery=machinery) class ReloadTests: @@ -380,7 +357,8 @@ def test_module_missing_spec(self): (Frozen_ReloadTests, Source_ReloadTests - ) = test_util.test_both(ReloadTests, init=init, util=util) + ) = test_util.test_both( + ReloadTests, init=init, util=util, machinery=machinery) class InvalidateCacheTests: @@ -390,8 +368,6 @@ def test_method_called(self): class InvalidatingNullFinder: def __init__(self, *ignored): self.called = False - def find_module(self, *args): - return None def invalidate_caches(self): self.called = True @@ -416,7 +392,8 @@ def test_method_lacking(self): (Frozen_InvalidateCacheTests, Source_InvalidateCacheTests - ) = test_util.test_both(InvalidateCacheTests, init=init) + ) = test_util.test_both( + InvalidateCacheTests, init=init, util=util, machinery=machinery) class FrozenImportlibTests(unittest.TestCase): diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index 08a615ecf5288b..0be504925ecc6a 100644 --- a/Lib/test/test_importlib/test_util.py +++ b/Lib/test/test_importlib/test_util.py @@ -8,14 +8,29 @@ import importlib.util import os import pathlib +import re import string import sys from test import support +import textwrap import types import unittest import unittest.mock import warnings +try: + import _testsinglephase +except ImportError: + _testsinglephase = None +try: + import _testmultiphase +except ImportError: + _testmultiphase = None +try: + import _xxsubinterpreters as _interpreters +except ModuleNotFoundError: + _interpreters = None + class DecodeSourceBytesTests: @@ -637,5 +652,111 @@ def test_magic_number(self): self.assertEqual(EXPECTED_MAGIC_NUMBER, actual, msg) +@unittest.skipIf(_interpreters is None, 'subinterpreters required') +class AllowingAllExtensionsTests(unittest.TestCase): + + ERROR = re.compile("^: module (.*) does not support loading in subinterpreters") + + def run_with_own_gil(self, script): + interpid = _interpreters.create(isolated=True) + try: + _interpreters.run_string(interpid, script) + except _interpreters.RunFailedError as exc: + if m := self.ERROR.match(str(exc)): + modname, = m.groups() + raise ImportError(modname) + + def run_with_shared_gil(self, script): + interpid = _interpreters.create(isolated=False) + try: + _interpreters.run_string(interpid, script) + except _interpreters.RunFailedError as exc: + if m := self.ERROR.match(str(exc)): + modname, = m.groups() + raise ImportError(modname) + + @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module") + def test_single_phase_init_module(self): + script = textwrap.dedent(''' + import importlib.util + with importlib.util.allowing_all_extensions(): + import _testsinglephase + ''') + with self.subTest('check disabled, 
shared GIL'): + self.run_with_shared_gil(script) + with self.subTest('check disabled, per-interpreter GIL'): + self.run_with_own_gil(script) + + script = textwrap.dedent(f''' + import importlib.util + with importlib.util.allowing_all_extensions(False): + import _testsinglephase + ''') + with self.subTest('check enabled, shared GIL'): + with self.assertRaises(ImportError): + self.run_with_shared_gil(script) + with self.subTest('check enabled, per-interpreter GIL'): + with self.assertRaises(ImportError): + self.run_with_own_gil(script) + + @unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module") + def test_incomplete_multi_phase_init_module(self): + prescript = textwrap.dedent(f''' + from importlib.util import spec_from_loader, module_from_spec + from importlib.machinery import ExtensionFileLoader + + name = '_test_shared_gil_only' + filename = {_testmultiphase.__file__!r} + loader = ExtensionFileLoader(name, filename) + spec = spec_from_loader(name, loader) + + ''') + + script = prescript + textwrap.dedent(''' + import importlib.util + with importlib.util.allowing_all_extensions(): + module = module_from_spec(spec) + loader.exec_module(module) + ''') + with self.subTest('check disabled, shared GIL'): + self.run_with_shared_gil(script) + with self.subTest('check disabled, per-interpreter GIL'): + self.run_with_own_gil(script) + + script = prescript + textwrap.dedent(''' + import importlib.util + with importlib.util.allowing_all_extensions(False): + module = module_from_spec(spec) + loader.exec_module(module) + ''') + with self.subTest('check enabled, shared GIL'): + self.run_with_shared_gil(script) + with self.subTest('check enabled, per-interpreter GIL'): + with self.assertRaises(ImportError): + self.run_with_own_gil(script) + + @unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module") + def test_complete_multi_phase_init_module(self): + script = textwrap.dedent(''' + import importlib.util + with importlib.util.allowing_all_extensions(): + import _testmultiphase + ''') + with self.subTest('check disabled, shared GIL'): + self.run_with_shared_gil(script) + with self.subTest('check disabled, per-interpreter GIL'): + self.run_with_own_gil(script) + + script = textwrap.dedent(f''' + import importlib.util + with importlib.util.allowing_all_extensions(False): + import _testmultiphase + ''') + with self.subTest('check enabled, shared GIL'): + self.run_with_shared_gil(script) + with self.subTest('check enabled, per-interpreter GIL'): + self.run_with_own_gil(script) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/test_windows.py b/Lib/test/test_importlib/test_windows.py index b7dfe865a03a97..40b8aa1787fe08 100644 --- a/Lib/test/test_importlib/test_windows.py +++ b/Lib/test/test_importlib/test_windows.py @@ -92,30 +92,16 @@ class WindowsRegistryFinderTests: def test_find_spec_missing(self): spec = self.machinery.WindowsRegistryFinder.find_spec('spam') - self.assertIs(spec, None) - - def test_find_module_missing(self): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - loader = self.machinery.WindowsRegistryFinder.find_module('spam') - self.assertIs(loader, None) + self.assertIsNone(spec) def test_module_found(self): with setup_module(self.machinery, self.test_module): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - loader = self.machinery.WindowsRegistryFinder.find_module(self.test_module) spec = 
self.machinery.WindowsRegistryFinder.find_spec(self.test_module) - self.assertIsNot(loader, None) - self.assertIsNot(spec, None) + self.assertIsNotNone(spec) def test_module_not_found(self): with setup_module(self.machinery, self.test_module, path="."): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - loader = self.machinery.WindowsRegistryFinder.find_module(self.test_module) spec = self.machinery.WindowsRegistryFinder.find_spec(self.test_module) - self.assertIsNone(loader) self.assertIsNone(spec) (Frozen_WindowsRegistryFinderTests, diff --git a/Lib/test/test_importlib/util.py b/Lib/test/test_importlib/util.py index e348733f6ce3c3..c25be096e52874 100644 --- a/Lib/test/test_importlib/util.py +++ b/Lib/test/test_importlib/util.py @@ -194,8 +194,7 @@ def import_state(**kwargs): new_value = default setattr(sys, attr, new_value) if len(kwargs): - raise ValueError( - 'unrecognized arguments: {0}'.format(kwargs.keys())) + raise ValueError('unrecognized arguments: {}'.format(kwargs)) yield finally: for attr, value in originals.items(): @@ -243,30 +242,6 @@ def __exit__(self, *exc_info): self._uncache.__exit__(None, None, None) -class mock_modules(_ImporterMock): - - """Importer mock using PEP 302 APIs.""" - - def find_module(self, fullname, path=None): - if fullname not in self.modules: - return None - else: - return self - - def load_module(self, fullname): - if fullname not in self.modules: - raise ImportError - else: - sys.modules[fullname] = self.modules[fullname] - if fullname in self.module_code: - try: - self.module_code[fullname]() - except Exception: - del sys.modules[fullname] - raise - return self.modules[fullname] - - class mock_spec(_ImporterMock): """Importer mock using PEP 451 APIs.""" diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 42e3d709bd683f..364f75db908b05 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -2111,6 +2111,28 @@ def __dict__(self): self.assertEqual(inspect.getattr_static(foo, 'a'), 3) self.assertFalse(test.called) + def test_mutated_mro(self): + test = self + test.called = False + + class Foo(dict): + a = 3 + @property + def __dict__(self): + test.called = True + return {} + + class Bar(dict): + a = 4 + + class Baz(Bar): pass + + baz = Baz() + self.assertEqual(inspect.getattr_static(baz, 'a'), 4) + Baz.__bases__ = (Foo,) + self.assertEqual(inspect.getattr_static(baz, 'a'), 3) + self.assertFalse(test.called) + def test_custom_object_dict(self): test = self test.called = False @@ -2165,6 +2187,35 @@ class Thing(metaclass=Meta): inspect.getattr_static(Thing, "spam") self.assertFalse(Thing.executed) + def test_custom___getattr__(self): + test = self + test.called = False + + class Foo: + def __getattr__(self, attr): + test.called = True + return {} + + with self.assertRaises(AttributeError): + inspect.getattr_static(Foo(), 'whatever') + + self.assertFalse(test.called) + + def test_custom___getattribute__(self): + test = self + test.called = False + + class Foo: + def __getattribute__(self, attr): + test.called = True + return {} + + with self.assertRaises(AttributeError): + inspect.getattr_static(Foo(), 'really_could_be_anything') + + self.assertFalse(test.called) + + class TestGetGeneratorState(unittest.TestCase): def setUp(self): @@ -4229,14 +4280,14 @@ def test(*args, **kwargs): @cpython_only def test_signature_bind_implicit_arg(self): - # Issue #19611: getcallargs should work with set comprehensions + # Issue #19611: getcallargs should work with comprehensions def make_set(): - 
return {z * z for z in range(5)} - setcomp_code = make_set.__code__.co_consts[1] - setcomp_func = types.FunctionType(setcomp_code, {}) + return set(z * z for z in range(5)) + gencomp_code = make_set.__code__.co_consts[1] + gencomp_func = types.FunctionType(gencomp_code, {}) iterator = iter(range(5)) - self.assertEqual(self.call(setcomp_func, iterator), {0, 1, 4, 9, 16}) + self.assertEqual(set(self.call(gencomp_func, iterator)), {0, 1, 4, 9, 16}) def test_signature_bind_posonly_kwargs(self): def foo(bar, /, **kwargs): diff --git a/Lib/test/test_listcomps.py b/Lib/test/test_listcomps.py index 91bf2547edc4ae..92fed98dd0004a 100644 --- a/Lib/test/test_listcomps.py +++ b/Lib/test/test_listcomps.py @@ -1,4 +1,5 @@ import doctest +import textwrap import unittest @@ -87,63 +88,227 @@ >>> [None for i in range(10)] [None, None, None, None, None, None, None, None, None, None] -########### Tests for various scoping corner cases ############ - -Return lambdas that use the iteration variable as a default argument - - >>> items = [(lambda i=i: i) for i in range(5)] - >>> [x() for x in items] - [0, 1, 2, 3, 4] - -Same again, only this time as a closure variable - - >>> items = [(lambda: i) for i in range(5)] - >>> [x() for x in items] - [4, 4, 4, 4, 4] - -Another way to test that the iteration variable is local to the list comp - - >>> items = [(lambda: i) for i in range(5)] - >>> i = 20 - >>> [x() for x in items] - [4, 4, 4, 4, 4] - -And confirm that a closure can jump over the list comp scope - - >>> items = [(lambda: y) for i in range(5)] - >>> y = 2 - >>> [x() for x in items] - [2, 2, 2, 2, 2] - -We also repeat each of the above scoping tests inside a function - - >>> def test_func(): - ... items = [(lambda i=i: i) for i in range(5)] - ... return [x() for x in items] - >>> test_func() - [0, 1, 2, 3, 4] +""" - >>> def test_func(): - ... items = [(lambda: i) for i in range(5)] - ... return [x() for x in items] - >>> test_func() - [4, 4, 4, 4, 4] - - >>> def test_func(): - ... items = [(lambda: i) for i in range(5)] - ... i = 20 - ... return [x() for x in items] - >>> test_func() - [4, 4, 4, 4, 4] - - >>> def test_func(): - ... items = [(lambda: y) for i in range(5)] - ... y = 2 - ... return [x() for x in items] - >>> test_func() - [2, 2, 2, 2, 2] -""" +class ListComprehensionTest(unittest.TestCase): + def _check_in_scopes(self, code, outputs=None, ns=None, scopes=None, raises=()): + code = textwrap.dedent(code) + scopes = scopes or ["module", "class", "function"] + for scope in scopes: + with self.subTest(scope=scope): + if scope == "class": + newcode = textwrap.dedent(""" + class _C: + {code} + """).format(code=textwrap.indent(code, " ")) + def get_output(moddict, name): + return getattr(moddict["_C"], name) + elif scope == "function": + newcode = textwrap.dedent(""" + def _f(): + {code} + return locals() + _out = _f() + """).format(code=textwrap.indent(code, " ")) + def get_output(moddict, name): + return moddict["_out"][name] + else: + newcode = code + def get_output(moddict, name): + return moddict[name] + ns = ns or {} + try: + exec(newcode, ns) + except raises as e: + # We care about e.g. 
NameError vs UnboundLocalError + self.assertIs(type(e), raises) + else: + for k, v in (outputs or {}).items(): + self.assertEqual(get_output(ns, k), v) + + def test_lambdas_with_iteration_var_as_default(self): + code = """ + items = [(lambda i=i: i) for i in range(5)] + y = [x() for x in items] + """ + outputs = {"y": [0, 1, 2, 3, 4]} + self._check_in_scopes(code, outputs) + + def test_lambdas_with_free_var(self): + code = """ + items = [(lambda: i) for i in range(5)] + y = [x() for x in items] + """ + outputs = {"y": [4, 4, 4, 4, 4]} + self._check_in_scopes(code, outputs) + + def test_class_scope_free_var_with_class_cell(self): + class C: + def method(self): + super() + return __class__ + items = [(lambda: i) for i in range(5)] + y = [x() for x in items] + + self.assertEqual(C.y, [4, 4, 4, 4, 4]) + self.assertIs(C().method(), C) + + def test_inner_cell_shadows_outer(self): + code = """ + items = [(lambda: i) for i in range(5)] + i = 20 + y = [x() for x in items] + """ + outputs = {"y": [4, 4, 4, 4, 4], "i": 20} + self._check_in_scopes(code, outputs) + + def test_closure_can_jump_over_comp_scope(self): + code = """ + items = [(lambda: y) for i in range(5)] + y = 2 + z = [x() for x in items] + """ + outputs = {"z": [2, 2, 2, 2, 2]} + self._check_in_scopes(code, outputs) + + def test_inner_cell_shadows_outer_redefined(self): + code = """ + y = 10 + items = [(lambda: y) for y in range(5)] + x = y + y = 20 + out = [z() for z in items] + """ + outputs = {"x": 10, "out": [4, 4, 4, 4, 4]} + self._check_in_scopes(code, outputs) + + def test_shadows_outer_cell(self): + code = """ + def inner(): + return g + [g for g in range(5)] + x = inner() + """ + outputs = {"x": -1} + self._check_in_scopes(code, outputs, ns={"g": -1}) + + def test_assignment_expression(self): + code = """ + x = -1 + items = [(x:=y) for y in range(3)] + """ + outputs = {"x": 2} + # assignment expression in comprehension is disallowed in class scope + self._check_in_scopes(code, outputs, scopes=["module", "function"]) + + def test_free_var_in_comp_child(self): + code = """ + lst = range(3) + funcs = [lambda: x for x in lst] + inc = [x + 1 for x in lst] + [x for x in inc] + x = funcs[0]() + """ + outputs = {"x": 2} + self._check_in_scopes(code, outputs) + + def test_shadow_with_free_and_local(self): + code = """ + lst = range(3) + x = -1 + funcs = [lambda: x for x in lst] + items = [x + 1 for x in lst] + """ + outputs = {"x": -1} + self._check_in_scopes(code, outputs) + + def test_shadow_comp_iterable_name(self): + code = """ + x = [1] + y = [x for x in x] + """ + outputs = {"x": [1]} + self._check_in_scopes(code, outputs) + + def test_nested_free(self): + code = """ + x = 1 + def g(): + [x for x in range(3)] + return x + g() + """ + outputs = {"x": 1} + self._check_in_scopes(code, outputs) + + def test_introspecting_frame_locals(self): + code = """ + import sys + [i for i in range(2)] + i = 20 + sys._getframe().f_locals + """ + outputs = {"i": 20} + self._check_in_scopes(code, outputs) + + def test_nested(self): + code = """ + l = [2, 3] + y = [[x ** 2 for x in range(x)] for x in l] + """ + outputs = {"y": [[0, 1], [0, 1, 4]]} + self._check_in_scopes(code, outputs) + + def test_nested_2(self): + code = """ + l = [1, 2, 3] + x = 3 + y = [x for [x ** x for x in range(x)][x - 1] in l] + """ + outputs = {"y": [3, 3, 3]} + self._check_in_scopes(code, outputs) + + def test_nested_3(self): + code = """ + l = [(1, 2), (3, 4), (5, 6)] + y = [x for (x, [x ** x for x in range(x)][x - 1]) in l] + """ + outputs = {"y": [1, 3, 5]} + 
self._check_in_scopes(code, outputs) + + def test_nameerror(self): + code = """ + [x for x in [1]] + x + """ + + self._check_in_scopes(code, raises=NameError) + + def test_dunder_name(self): + code = """ + y = [__x for __x in [1]] + """ + outputs = {"y": [1]} + self._check_in_scopes(code, outputs) + + def test_unbound_local_after_comprehension(self): + def f(): + if False: + x = 0 + [x for x in [1]] + return x + + with self.assertRaises(UnboundLocalError): + f() + + def test_unbound_local_inside_comprehension(self): + def f(): + l = [None] + return [1 for (l[0], l) in [[1, 2]]] + + with self.assertRaises(UnboundLocalError): + f() __test__ = {'doctests' : doctests} diff --git a/Lib/test/test_module.py b/Lib/test/test_module.py index 70e4efea69359a..c7eb92290e1b6d 100644 --- a/Lib/test/test_module.py +++ b/Lib/test/test_module.py @@ -236,7 +236,7 @@ def test_module_repr_with_full_loader(self): # Yes, a class not an instance. m.__loader__ = FullLoader self.assertEqual( - repr(m), ")>") + repr(m), f")>") def test_module_repr_with_bare_loader_and_filename(self): m = ModuleType('foo') diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py index 8a8eb37f4c8390..81db4ff0b8ff0a 100644 --- a/Lib/test/test_monitoring.py +++ b/Lib/test/test_monitoring.py @@ -877,6 +877,42 @@ def func3(): ('instruction', 'func3', 34), ('line', 'check_events', 11)]) + def test_with_restart(self): + def func1(): + line1 = 1 + line2 = 2 + line3 = 3 + + self.check_events(func1, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('line', 'func1', 1), + ('instruction', 'func1', 2), + ('instruction', 'func1', 4), + ('line', 'func1', 2), + ('instruction', 'func1', 6), + ('instruction', 'func1', 8), + ('line', 'func1', 3), + ('instruction', 'func1', 10), + ('instruction', 'func1', 12), + ('instruction', 'func1', 14), + ('line', 'check_events', 11)]) + + sys.monitoring.restart_events() + + self.check_events(func1, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ + ('line', 'check_events', 10), + ('line', 'func1', 1), + ('instruction', 'func1', 2), + ('instruction', 'func1', 4), + ('line', 'func1', 2), + ('instruction', 'func1', 6), + ('instruction', 'func1', 8), + ('line', 'func1', 3), + ('instruction', 'func1', 10), + ('instruction', 'func1', 12), + ('instruction', 'func1', 14), + ('line', 'check_events', 11)]) + class TestInstallIncrementallly(MonitoringTestBase, unittest.TestCase): def check_events(self, func, must_include, tool=TEST_TOOL, recorders=(ExceptionRecorder,)): diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py index 76cfadeedcea84..ee0ef9a34c385c 100644 --- a/Lib/test/test_pathlib.py +++ b/Lib/test/test_pathlib.py @@ -29,11 +29,12 @@ # class _BasePurePathSubclass(object): - init_called = False + def __init__(self, *pathsegments, session_id): + super().__init__(*pathsegments) + self.session_id = session_id - def __init__(self, *args): - super().__init__(*args) - self.init_called = True + def with_segments(self, *pathsegments): + return type(self)(*pathsegments, session_id=self.session_id) class _BasePurePathTest(object): @@ -81,9 +82,9 @@ def test_bytes(self): r"where __fspath__ returns a str, not 'bytes'") with self.assertRaisesRegex(TypeError, message): P(b'a') - with self.assertRaises(TypeError): + with self.assertRaisesRegex(TypeError, message): P(b'a', 'b') - with self.assertRaises(TypeError): + with self.assertRaisesRegex(TypeError, message): P('a', b'b') with self.assertRaises(TypeError): P('a').joinpath(b'b') @@ -121,20 +122,21 @@ 
def test_str_subclass_common(self): self._check_str_subclass('a/b.txt') self._check_str_subclass('/a/b.txt') - def test_init_called_common(self): + def test_with_segments_common(self): class P(_BasePurePathSubclass, self.cls): pass - p = P('foo', 'bar') - self.assertTrue((p / 'foo').init_called) - self.assertTrue(('foo' / p).init_called) - self.assertTrue(p.joinpath('foo').init_called) - self.assertTrue(p.with_name('foo').init_called) - self.assertTrue(p.with_stem('foo').init_called) - self.assertTrue(p.with_suffix('.foo').init_called) - self.assertTrue(p.relative_to('foo').init_called) - self.assertTrue(p.parent.init_called) + p = P('foo', 'bar', session_id=42) + self.assertEqual(42, (p / 'foo').session_id) + self.assertEqual(42, ('foo' / p).session_id) + self.assertEqual(42, p.joinpath('foo').session_id) + self.assertEqual(42, p.with_name('foo').session_id) + self.assertEqual(42, p.with_stem('foo').session_id) + self.assertEqual(42, p.with_suffix('.foo').session_id) + self.assertEqual(42, p.with_segments('foo').session_id) + self.assertEqual(42, p.relative_to('foo').session_id) + self.assertEqual(42, p.parent.session_id) for parent in p.parents: - self.assertTrue(parent.init_called) + self.assertEqual(42, parent.session_id) def _get_drive_root_parts(self, parts): path = self.cls(*parts) @@ -1647,6 +1649,27 @@ def test_home(self): env['HOME'] = os.path.join(BASE, 'home') self._test_home(self.cls.home()) + def test_with_segments(self): + class P(_BasePurePathSubclass, self.cls): + pass + p = P(BASE, session_id=42) + self.assertEqual(42, p.absolute().session_id) + self.assertEqual(42, p.resolve().session_id) + if not is_wasi: # WASI has no user accounts. + self.assertEqual(42, p.with_segments('~').expanduser().session_id) + self.assertEqual(42, (p / 'fileA').rename(p / 'fileB').session_id) + self.assertEqual(42, (p / 'fileB').replace(p / 'fileA').session_id) + if os_helper.can_symlink(): + self.assertEqual(42, (p / 'linkA').readlink().session_id) + for path in p.iterdir(): + self.assertEqual(42, path.session_id) + for path in p.glob('*'): + self.assertEqual(42, path.session_id) + for path in p.rglob('*'): + self.assertEqual(42, path.session_id) + for dirpath, dirnames, filenames in p.walk(): + self.assertEqual(42, dirpath.session_id) + def test_samefile(self): fileA_path = os.path.join(BASE, 'fileA') fileB_path = os.path.join(BASE, 'dirB', 'fileB') @@ -1700,6 +1723,8 @@ def test_exists(self): self.assertIs(True, (p / 'linkB').exists()) self.assertIs(True, (p / 'linkB' / 'fileB').exists()) self.assertIs(False, (p / 'linkA' / 'bah').exists()) + self.assertIs(False, (p / 'brokenLink').exists()) + self.assertIs(True, (p / 'brokenLink').exists(follow_symlinks=False)) self.assertIs(False, (p / 'foo').exists()) self.assertIs(False, P('/xyzzy').exists()) self.assertIs(False, P(BASE + '\udfff').exists()) @@ -1806,21 +1831,36 @@ def _check(glob, expected): _check(p.glob("*/fileB"), ['dirB/fileB']) else: _check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB']) + if os_helper.can_symlink(): + _check(p.glob("brokenLink"), ['brokenLink']) if not os_helper.can_symlink(): _check(p.glob("*/"), ["dirA", "dirB", "dirC", "dirE"]) else: _check(p.glob("*/"), ["dirA", "dirB", "dirC", "dirE", "linkB"]) + def test_glob_case_sensitive(self): + P = self.cls + def _check(path, pattern, case_sensitive, expected): + actual = {str(q) for q in path.glob(pattern, case_sensitive=case_sensitive)} + expected = {str(P(BASE, q)) for q in expected} + self.assertEqual(actual, expected) + path = P(BASE) + _check(path, 
"DIRB/FILE*", True, []) + _check(path, "DIRB/FILE*", False, ["dirB/fileB"]) + _check(path, "dirb/file*", True, []) + _check(path, "dirb/file*", False, ["dirB/fileB"]) + def test_rglob_common(self): def _check(glob, expected): - self.assertEqual(set(glob), { P(BASE, q) for q in expected }) + self.assertEqual(sorted(glob), sorted(P(BASE, q) for q in expected)) P = self.cls p = P(BASE) it = p.rglob("fileA") self.assertIsInstance(it, collections.abc.Iterator) _check(it, ["fileA"]) _check(p.rglob("fileB"), ["dirB/fileB"]) + _check(p.rglob("**/fileB"), ["dirB/fileB"]) _check(p.rglob("*/fileA"), []) if not os_helper.can_symlink(): _check(p.rglob("*/fileB"), ["dirB/fileB"]) @@ -1844,9 +1884,12 @@ def _check(glob, expected): _check(p.rglob("*"), ["dirC/fileC", "dirC/novel.txt", "dirC/dirD", "dirC/dirD/fileD"]) _check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"]) + _check(p.rglob("**/file*"), ["dirC/fileC", "dirC/dirD/fileD"]) + _check(p.rglob("dir*/**"), ["dirC/dirD"]) _check(p.rglob("*/*"), ["dirC/dirD/fileD"]) _check(p.rglob("*/"), ["dirC/dirD"]) _check(p.rglob(""), ["dirC", "dirC/dirD"]) + _check(p.rglob("**"), ["dirC", "dirC/dirD"]) # gh-91616, a re module regression _check(p.rglob("*.txt"), ["dirC/novel.txt"]) _check(p.rglob("*.*"), ["dirC/novel.txt"]) @@ -1892,8 +1935,13 @@ def test_glob_dotdot(self): P = self.cls p = P(BASE) self.assertEqual(set(p.glob("..")), { P(BASE, "..") }) + self.assertEqual(set(p.glob("../..")), { P(BASE, "..", "..") }) + self.assertEqual(set(p.glob("dirA/..")), { P(BASE, "dirA", "..") }) self.assertEqual(set(p.glob("dirA/../file*")), { P(BASE, "dirA/../fileA") }) + self.assertEqual(set(p.glob("dirA/../file*/..")), set()) self.assertEqual(set(p.glob("../xyzzy")), set()) + self.assertEqual(set(p.glob("xyzzy/..")), set()) + self.assertEqual(set(p.glob("/".join([".."] * 50))), { P(BASE, *[".."] * 50)}) @os_helper.skip_unless_symlink def test_glob_permissions(self): @@ -3113,7 +3161,7 @@ def test_glob(self): self.assertEqual(set(p.glob("FILEa")), { P(BASE, "fileA") }) self.assertEqual(set(p.glob("*a\\")), { P(BASE, "dirA") }) self.assertEqual(set(p.glob("F*a")), { P(BASE, "fileA") }) - self.assertEqual(set(map(str, p.glob("FILEa"))), {f"{p}\\FILEa"}) + self.assertEqual(set(map(str, p.glob("FILEa"))), {f"{p}\\fileA"}) self.assertEqual(set(map(str, p.glob("F*a"))), {f"{p}\\fileA"}) def test_rglob(self): @@ -3121,7 +3169,7 @@ def test_rglob(self): p = P(BASE, "dirC") self.assertEqual(set(p.rglob("FILEd")), { P(BASE, "dirC/dirD/fileD") }) self.assertEqual(set(p.rglob("*\\")), { P(BASE, "dirC/dirD") }) - self.assertEqual(set(map(str, p.rglob("FILEd"))), {f"{p}\\dirD\\FILEd"}) + self.assertEqual(set(map(str, p.rglob("FILEd"))), {f"{p}\\dirD\\fileD"}) def test_expanduser(self): P = self.cls diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index 2f712a10257984..482c92dbf1f6a0 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -746,6 +746,84 @@ def test_pdb_where_command(): (Pdb) continue """ +def test_convenience_variables(): + """Test convenience variables + + >>> def util_function(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... try: + ... raise Exception('test') + ... except: + ... pass + ... return 1 + + >>> def test_function(): + ... util_function() + + >>> with PdbTestInput([ # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE + ... '$_frame.f_lineno', # Check frame convenience variable + ... '$a = 10', # Set a convenience variable + ... '$a', # Print its value + ... 'p $a + 2', # Do some calculation + ... 
'u', # Switch frame + ... '$_frame.f_lineno', # Make sure the frame changed + ... '$a', # Make sure the value persists + ... 'd', # Go back to the original frame + ... 'next', + ... '$a', # The value should be gone + ... 'next', + ... '$_exception', # Check exception convenience variable + ... 'next', + ... '$_exception', # Exception should be gone + ... 'return', + ... '$_retval', # Check return convenience variable + ... 'continue', + ... ]): + ... test_function() + > (3)util_function() + -> try: + (Pdb) $_frame.f_lineno + 3 + (Pdb) $a = 10 + (Pdb) $a + 10 + (Pdb) p $a + 2 + 12 + (Pdb) u + > (2)test_function() + -> util_function() + (Pdb) $_frame.f_lineno + 2 + (Pdb) $a + 10 + (Pdb) d + > (3)util_function() + -> try: + (Pdb) next + > (4)util_function() + -> raise Exception('test') + (Pdb) $a + *** KeyError: 'a' + (Pdb) next + Exception: test + > (4)util_function() + -> raise Exception('test') + (Pdb) $_exception + Exception('test') + (Pdb) next + > (5)util_function() + -> except: + (Pdb) $_exception + *** KeyError: '_exception' + (Pdb) return + --Return-- + > (7)util_function()->1 + -> return 1 + (Pdb) $_retval + 1 + (Pdb) continue + """ + def test_post_mortem(): """Test post mortem traceback debugging. diff --git a/Lib/test/test_perf_profiler.py b/Lib/test/test_perf_profiler.py index 2b977d78d39324..5418f9f35485f8 100644 --- a/Lib/test/test_perf_profiler.py +++ b/Lib/test/test_perf_profiler.py @@ -1,4 +1,5 @@ import unittest +import string import subprocess import sys import sysconfig @@ -70,9 +71,14 @@ def baz(): perf_file = pathlib.Path(f"/tmp/perf-{process.pid}.map") self.assertTrue(perf_file.exists()) perf_file_contents = perf_file.read_text() - self.assertIn(f"py::foo:{script}", perf_file_contents) - self.assertIn(f"py::bar:{script}", perf_file_contents) - self.assertIn(f"py::baz:{script}", perf_file_contents) + perf_lines = perf_file_contents.splitlines(); + expected_symbols = [f"py::foo:{script}", f"py::bar:{script}", f"py::baz:{script}"] + for expected_symbol in expected_symbols: + perf_line = next((line for line in perf_lines if expected_symbol in line), None) + self.assertIsNotNone(perf_line, f"Could not find {expected_symbol} in perf file") + perf_addr = perf_line.split(" ")[0] + self.assertFalse(perf_addr.startswith("0x"), "Address should not be prefixed with 0x") + self.assertTrue(set(perf_addr).issubset(string.hexdigits), "Address should contain only hex characters") def test_trampoline_works_with_forks(self): code = """if 1: diff --git a/Lib/test/test_pkgutil.py b/Lib/test/test_pkgutil.py index 4d9f5db3c6b3cf..6fcd726345eeac 100644 --- a/Lib/test/test_pkgutil.py +++ b/Lib/test/test_pkgutil.py @@ -1,6 +1,6 @@ from pathlib import Path from test.support.import_helper import unload, CleanImport -from test.support.warnings_helper import check_warnings +from test.support.warnings_helper import check_warnings, ignore_warnings import unittest import sys import importlib @@ -429,7 +429,7 @@ def test_iter_importers(self): importers = list(iter_importers(fullname)) expected_importer = get_importer(pathitem) for finder in importers: - spec = pkgutil._get_spec(finder, fullname) + spec = finder.find_spec(fullname) loader = spec.loader try: loader = loader.loader @@ -441,7 +441,7 @@ def test_iter_importers(self): self.assertEqual(finder, expected_importer) self.assertIsInstance(loader, importlib.machinery.SourceFileLoader) - self.assertIsNone(pkgutil._get_spec(finder, pkgname)) + self.assertIsNone(finder.find_spec(pkgname)) with self.assertRaises(ImportError): 
list(iter_importers('invalid.module')) @@ -535,31 +535,18 @@ class ImportlibMigrationTests(unittest.TestCase): # PEP 302 emulation in this module is in the process of being # deprecated in favour of importlib proper - def check_deprecated(self): - return check_warnings( - ("This emulation is deprecated and slated for removal in " - "Python 3.12; use 'importlib' instead", - DeprecationWarning)) - - def test_get_loader_avoids_emulation(self): - with check_warnings() as w: - self.assertIsNotNone(pkgutil.get_loader("sys")) - self.assertIsNotNone(pkgutil.get_loader("os")) - self.assertIsNotNone(pkgutil.get_loader("test.support")) - self.assertEqual(len(w.warnings), 0) - @unittest.skipIf(__name__ == '__main__', 'not compatible with __main__') + @ignore_warnings(category=DeprecationWarning) def test_get_loader_handles_missing_loader_attribute(self): global __loader__ this_loader = __loader__ del __loader__ try: - with check_warnings() as w: - self.assertIsNotNone(pkgutil.get_loader(__name__)) - self.assertEqual(len(w.warnings), 0) + self.assertIsNotNone(pkgutil.get_loader(__name__)) finally: __loader__ = this_loader + @ignore_warnings(category=DeprecationWarning) def test_get_loader_handles_missing_spec_attribute(self): name = 'spam' mod = type(sys)(name) @@ -569,6 +556,7 @@ def test_get_loader_handles_missing_spec_attribute(self): loader = pkgutil.get_loader(name) self.assertIsNone(loader) + @ignore_warnings(category=DeprecationWarning) def test_get_loader_handles_spec_attribute_none(self): name = 'spam' mod = type(sys)(name) @@ -578,6 +566,7 @@ def test_get_loader_handles_spec_attribute_none(self): loader = pkgutil.get_loader(name) self.assertIsNone(loader) + @ignore_warnings(category=DeprecationWarning) def test_get_loader_None_in_sys_modules(self): name = 'totally bogus' sys.modules[name] = None @@ -587,18 +576,26 @@ def test_get_loader_None_in_sys_modules(self): del sys.modules[name] self.assertIsNone(loader) + def test_get_loader_is_deprecated(self): + with check_warnings( + (r".*\bpkgutil.get_loader\b.*", DeprecationWarning), + ): + res = pkgutil.get_loader("sys") + self.assertIsNotNone(res) + + def test_find_loader_is_deprecated(self): + with check_warnings( + (r".*\bpkgutil.find_loader\b.*", DeprecationWarning), + ): + res = pkgutil.find_loader("sys") + self.assertIsNotNone(res) + + @ignore_warnings(category=DeprecationWarning) def test_find_loader_missing_module(self): name = 'totally bogus' loader = pkgutil.find_loader(name) self.assertIsNone(loader) - def test_find_loader_avoids_emulation(self): - with check_warnings() as w: - self.assertIsNotNone(pkgutil.find_loader("sys")) - self.assertIsNotNone(pkgutil.find_loader("os")) - self.assertIsNotNone(pkgutil.find_loader("test.support")) - self.assertEqual(len(w.warnings), 0) - def test_get_importer_avoids_emulation(self): # We use an illegal path so *none* of the path hooks should fire with check_warnings() as w: diff --git a/Lib/test/test_sqlite3/test_dbapi.py b/Lib/test/test_sqlite3/test_dbapi.py index 1bb0e13e356e78..328b0467e7fa3d 100644 --- a/Lib/test/test_sqlite3/test_dbapi.py +++ b/Lib/test/test_sqlite3/test_dbapi.py @@ -1495,6 +1495,14 @@ def test_blob_closed_db_read(self): "Cannot operate on a closed database", blob.read) + def test_blob_32bit_rowid(self): + # gh-100370: we should not get an OverflowError for 32-bit rowids + with memory_database() as cx: + rowid = 2**32 + cx.execute("create table t(t blob)") + cx.execute("insert into t(rowid, t) values (?, zeroblob(1))", (rowid,)) + cx.blobopen('t', 't', rowid) + 
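A minimal sketch of the sqlite3 Blob API that the new test_blob_32bit_rowid test above exercises, assuming Python 3.11+ and an in-memory database; the table and column names here are illustrative only, not part of the patch:

import sqlite3

# blobopen() must accept a rowid wider than 32 bits (gh-100370).
cx = sqlite3.connect(":memory:")
cx.execute("CREATE TABLE t (data BLOB)")
rowid = 2**32
cx.execute("INSERT INTO t (rowid, data) VALUES (?, zeroblob(4))", (rowid,))
with cx.blobopen("t", "data", rowid) as blob:
    blob.write(b"spam")
    blob.seek(0)
    assert blob.read() == b"spam"
cx.close()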
@threading_helper.requires_working_threading() class ThreadTests(unittest.TestCase): diff --git a/Lib/test/test_stable_abi_ctypes.py b/Lib/test/test_stable_abi_ctypes.py index 2feaaf8603b831..4ca39d85e5460c 100644 --- a/Lib/test/test_stable_abi_ctypes.py +++ b/Lib/test/test_stable_abi_ctypes.py @@ -529,6 +529,7 @@ def test_windows_feature_macros(self): "PyObject_GetBuffer", "PyObject_GetItem", "PyObject_GetIter", + "PyObject_GetTypeData", "PyObject_HasAttr", "PyObject_HasAttrString", "PyObject_Hash", @@ -679,6 +680,7 @@ def test_windows_feature_macros(self): "PyType_GetName", "PyType_GetQualName", "PyType_GetSlot", + "PyType_GetTypeDataSize", "PyType_IsSubtype", "PyType_Modified", "PyType_Ready", diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py index ed773a3cff2a6d..698ab48f48eaa1 100644 --- a/Lib/test/test_super.py +++ b/Lib/test/test_super.py @@ -359,7 +359,7 @@ class C: def method(self): return super().msg - with patch("test.test_super.super", MySuper) as m: + with patch(f"{__name__}.super", MySuper) as m: self.assertEqual(C().method(), "super super") def test_shadowed_dynamic_two_arg(self): @@ -373,7 +373,7 @@ class C: def method(self): return super(1, 2).msg - with patch("test.test_super.super", MySuper) as m: + with patch(f"{__name__}.super", MySuper) as m: self.assertEqual(C().method(), "super super") self.assertEqual(call_args, [(1, 2)]) diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index fdd74c37e26235..97165264b34bbe 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -1349,6 +1349,7 @@ def func(): allow_threads={allowed}, allow_daemon_threads={daemon_allowed}, check_multi_interp_extensions=False, + own_gil=False, ) """) with test.support.SuppressCrashReport(): diff --git a/Lib/test/test_tix.py b/Lib/test/test_tix.py index 454baeb38a9342..d0d2a164ad2c67 100644 --- a/Lib/test/test_tix.py +++ b/Lib/test/test_tix.py @@ -5,7 +5,7 @@ from test.support import check_sanitizer if check_sanitizer(address=True, memory=True): - raise unittest.SkipTest("Tests involvin libX11 can SEGFAULT on ASAN/MSAN builds") + raise unittest.SkipTest("Tests involving libX11 can SEGFAULT on ASAN/MSAN builds") # Skip this test if the _tkinter module wasn't built. 
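The test_super changes above patch the module-global name super via unittest.mock. A rough sketch, under the assumption that a zero-argument super() call resolves the name "super" through the module globals at call time, of why such a patch takes effect; the class names here are hypothetical:

from unittest.mock import patch

class FakeSuper:
    def __init__(self, *args):
        pass
    msg = "patched"

class C:
    def method(self):
        # The name "super" is looked up in the module namespace before the
        # builtins, so rebinding it changes what this call returns.
        return super().msg

# unittest.mock allows patching builtin names on a module, so this works
# even though the module does not define "super" itself.
with patch(f"{__name__}.super", FakeSuper):
    assert C().method() == "patched"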
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 283a7c23609e67..911b53e5816588 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -11,7 +11,7 @@ from test.test_grammar import (VALID_UNDERSCORE_LITERALS, INVALID_UNDERSCORE_LITERALS) from test.support import os_helper -from test.support.script_helper import run_test_script, make_script +from test.support.script_helper import run_test_script, make_script, run_python_until_end import os import token @@ -1470,6 +1470,19 @@ def test_comment_at_the_end_of_the_source_without_newline(self): self.assertEqual(tok_name[tokens[i + 1].exact_type], tok_name[expected_tokens[i]]) self.assertEqual(tok_name[tokens[-1].exact_type], tok_name[token.ENDMARKER]) + def test_invalid_character_in_fstring_middle(self): + # See gh-103824 + script = b'''F""" + \xe5"""''' + + with os_helper.temp_dir() as temp_dir: + filename = os.path.join(temp_dir, "script.py") + with open(filename, 'wb') as file: + file.write(script) + rs, _ = run_python_until_end(filename) + self.assertIn(b"SyntaxError", rs.err) + + class UntokenizeTest(TestCase): def test_bad_input_order(self): diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py index fad2b3b8379ffc..73339ebdb7c4e9 100644 --- a/Lib/test/test_trace.py +++ b/Lib/test/test_trace.py @@ -187,9 +187,7 @@ def test_trace_list_comprehension(self): firstlineno_called = get_firstlineno(traced_doubler) expected = { (self.my_py_filename, firstlineno_calling + 1): 1, - # List comprehensions work differently in 3.x, so the count - # below changed compared to 2.x. - (self.my_py_filename, firstlineno_calling + 2): 12, + (self.my_py_filename, firstlineno_calling + 2): 11, (self.my_py_filename, firstlineno_calling + 3): 1, (self.my_py_filename, firstlineno_called + 1): 10, } diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 7c6a521c3c48f8..f162e587810ac0 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -880,6 +880,11 @@ def test_cannot_be_called(self): with self.assertRaises(TypeError): Unpack() + def test_usage_with_kwargs(self): + Movie = TypedDict('Movie', {'name': str, 'year': int}) + def foo(**kwargs: Unpack[Movie]): ... 
+ self.assertEqual(repr(foo.__annotations__['kwargs']), + f"typing.Unpack[{__name__}.Movie]") class TypeVarTupleTests(BaseTestCase): @@ -1050,14 +1055,14 @@ class G2(Generic[Unpack[Ts]]): pass self.assertEqual(repr(Ts), 'Ts') - self.assertEqual(repr((*Ts,)[0]), '*Ts') - self.assertEqual(repr(Unpack[Ts]), '*Ts') + self.assertEqual(repr((*Ts,)[0]), 'typing.Unpack[Ts]') + self.assertEqual(repr(Unpack[Ts]), 'typing.Unpack[Ts]') - self.assertEqual(repr(tuple[*Ts]), 'tuple[*Ts]') - self.assertEqual(repr(Tuple[Unpack[Ts]]), 'typing.Tuple[*Ts]') + self.assertEqual(repr(tuple[*Ts]), 'tuple[typing.Unpack[Ts]]') + self.assertEqual(repr(Tuple[Unpack[Ts]]), 'typing.Tuple[typing.Unpack[Ts]]') - self.assertEqual(repr(*tuple[*Ts]), '*tuple[*Ts]') - self.assertEqual(repr(Unpack[Tuple[Unpack[Ts]]]), '*typing.Tuple[*Ts]') + self.assertEqual(repr(*tuple[*Ts]), '*tuple[typing.Unpack[Ts]]') + self.assertEqual(repr(Unpack[Tuple[Unpack[Ts]]]), 'typing.Unpack[typing.Tuple[typing.Unpack[Ts]]]') def test_variadic_class_repr_is_correct(self): Ts = TypeVarTuple('Ts') @@ -1074,86 +1079,86 @@ class B(Generic[Unpack[Ts]]): pass self.assertEndsWith(repr(A[*tuple[int, ...]]), 'A[*tuple[int, ...]]') self.assertEndsWith(repr(B[Unpack[Tuple[int, ...]]]), - 'B[*typing.Tuple[int, ...]]') + 'B[typing.Unpack[typing.Tuple[int, ...]]]') self.assertEndsWith(repr(A[float, *tuple[int, ...]]), 'A[float, *tuple[int, ...]]') self.assertEndsWith(repr(A[float, Unpack[Tuple[int, ...]]]), - 'A[float, *typing.Tuple[int, ...]]') + 'A[float, typing.Unpack[typing.Tuple[int, ...]]]') self.assertEndsWith(repr(A[*tuple[int, ...], str]), 'A[*tuple[int, ...], str]') self.assertEndsWith(repr(B[Unpack[Tuple[int, ...]], str]), - 'B[*typing.Tuple[int, ...], str]') + 'B[typing.Unpack[typing.Tuple[int, ...]], str]') self.assertEndsWith(repr(A[float, *tuple[int, ...], str]), 'A[float, *tuple[int, ...], str]') self.assertEndsWith(repr(B[float, Unpack[Tuple[int, ...]], str]), - 'B[float, *typing.Tuple[int, ...], str]') + 'B[float, typing.Unpack[typing.Tuple[int, ...]], str]') def test_variadic_class_alias_repr_is_correct(self): Ts = TypeVarTuple('Ts') class A(Generic[Unpack[Ts]]): pass B = A[*Ts] - self.assertEndsWith(repr(B), 'A[*Ts]') + self.assertEndsWith(repr(B), 'A[typing.Unpack[Ts]]') self.assertEndsWith(repr(B[()]), 'A[()]') self.assertEndsWith(repr(B[float]), 'A[float]') self.assertEndsWith(repr(B[float, str]), 'A[float, str]') C = A[Unpack[Ts]] - self.assertEndsWith(repr(C), 'A[*Ts]') + self.assertEndsWith(repr(C), 'A[typing.Unpack[Ts]]') self.assertEndsWith(repr(C[()]), 'A[()]') self.assertEndsWith(repr(C[float]), 'A[float]') self.assertEndsWith(repr(C[float, str]), 'A[float, str]') D = A[*Ts, int] - self.assertEndsWith(repr(D), 'A[*Ts, int]') + self.assertEndsWith(repr(D), 'A[typing.Unpack[Ts], int]') self.assertEndsWith(repr(D[()]), 'A[int]') self.assertEndsWith(repr(D[float]), 'A[float, int]') self.assertEndsWith(repr(D[float, str]), 'A[float, str, int]') E = A[Unpack[Ts], int] - self.assertEndsWith(repr(E), 'A[*Ts, int]') + self.assertEndsWith(repr(E), 'A[typing.Unpack[Ts], int]') self.assertEndsWith(repr(E[()]), 'A[int]') self.assertEndsWith(repr(E[float]), 'A[float, int]') self.assertEndsWith(repr(E[float, str]), 'A[float, str, int]') F = A[int, *Ts] - self.assertEndsWith(repr(F), 'A[int, *Ts]') + self.assertEndsWith(repr(F), 'A[int, typing.Unpack[Ts]]') self.assertEndsWith(repr(F[()]), 'A[int]') self.assertEndsWith(repr(F[float]), 'A[int, float]') self.assertEndsWith(repr(F[float, str]), 'A[int, float, str]') G = A[int, Unpack[Ts]] - 
self.assertEndsWith(repr(G), 'A[int, *Ts]') + self.assertEndsWith(repr(G), 'A[int, typing.Unpack[Ts]]') self.assertEndsWith(repr(G[()]), 'A[int]') self.assertEndsWith(repr(G[float]), 'A[int, float]') self.assertEndsWith(repr(G[float, str]), 'A[int, float, str]') H = A[int, *Ts, str] - self.assertEndsWith(repr(H), 'A[int, *Ts, str]') + self.assertEndsWith(repr(H), 'A[int, typing.Unpack[Ts], str]') self.assertEndsWith(repr(H[()]), 'A[int, str]') self.assertEndsWith(repr(H[float]), 'A[int, float, str]') self.assertEndsWith(repr(H[float, str]), 'A[int, float, str, str]') I = A[int, Unpack[Ts], str] - self.assertEndsWith(repr(I), 'A[int, *Ts, str]') + self.assertEndsWith(repr(I), 'A[int, typing.Unpack[Ts], str]') self.assertEndsWith(repr(I[()]), 'A[int, str]') self.assertEndsWith(repr(I[float]), 'A[int, float, str]') self.assertEndsWith(repr(I[float, str]), 'A[int, float, str, str]') J = A[*Ts, *tuple[str, ...]] - self.assertEndsWith(repr(J), 'A[*Ts, *tuple[str, ...]]') + self.assertEndsWith(repr(J), 'A[typing.Unpack[Ts], *tuple[str, ...]]') self.assertEndsWith(repr(J[()]), 'A[*tuple[str, ...]]') self.assertEndsWith(repr(J[float]), 'A[float, *tuple[str, ...]]') self.assertEndsWith(repr(J[float, str]), 'A[float, str, *tuple[str, ...]]') K = A[Unpack[Ts], Unpack[Tuple[str, ...]]] - self.assertEndsWith(repr(K), 'A[*Ts, *typing.Tuple[str, ...]]') - self.assertEndsWith(repr(K[()]), 'A[*typing.Tuple[str, ...]]') - self.assertEndsWith(repr(K[float]), 'A[float, *typing.Tuple[str, ...]]') - self.assertEndsWith(repr(K[float, str]), 'A[float, str, *typing.Tuple[str, ...]]') + self.assertEndsWith(repr(K), 'A[typing.Unpack[Ts], typing.Unpack[typing.Tuple[str, ...]]]') + self.assertEndsWith(repr(K[()]), 'A[typing.Unpack[typing.Tuple[str, ...]]]') + self.assertEndsWith(repr(K[float]), 'A[float, typing.Unpack[typing.Tuple[str, ...]]]') + self.assertEndsWith(repr(K[float, str]), 'A[float, str, typing.Unpack[typing.Tuple[str, ...]]]') def test_cannot_subclass(self): with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_TYPE): @@ -1171,9 +1176,9 @@ class C(type(Unpack[Ts])): pass with self.assertRaisesRegex(TypeError, r'Cannot subclass typing\.Unpack'): class C(Unpack): pass - with self.assertRaisesRegex(TypeError, r'Cannot subclass \*Ts'): + with self.assertRaisesRegex(TypeError, r'Cannot subclass typing.Unpack\[Ts\]'): class C(*Ts): pass - with self.assertRaisesRegex(TypeError, r'Cannot subclass \*Ts'): + with self.assertRaisesRegex(TypeError, r'Cannot subclass typing.Unpack\[Ts\]'): class C(Unpack[Ts]): pass def test_variadic_class_args_are_correct(self): @@ -4108,13 +4113,13 @@ class TsP(Generic[*Ts, P]): MyCallable[[int], bool]: "MyCallable[[int], bool]", MyCallable[[int, str], bool]: "MyCallable[[int, str], bool]", MyCallable[[int, list[int]], bool]: "MyCallable[[int, list[int]], bool]", - MyCallable[Concatenate[*Ts, P], T]: "MyCallable[typing.Concatenate[*Ts, ~P], ~T]", + MyCallable[Concatenate[*Ts, P], T]: "MyCallable[typing.Concatenate[typing.Unpack[Ts], ~P], ~T]", DoubleSpec[P2, P, T]: "DoubleSpec[~P2, ~P, ~T]", DoubleSpec[[int], [str], bool]: "DoubleSpec[[int], [str], bool]", DoubleSpec[[int, int], [str, str], bool]: "DoubleSpec[[int, int], [str, str], bool]", - TsP[*Ts, P]: "TsP[*Ts, ~P]", + TsP[*Ts, P]: "TsP[typing.Unpack[Ts], ~P]", TsP[int, str, list[int], []]: "TsP[int, str, list[int], []]", TsP[int, [str, list[int]]]: "TsP[int, [str, list[int]]]", diff --git a/Lib/test/test_unittest/test_assertions.py b/Lib/test/test_unittest/test_assertions.py index 6557104b81fc0f..5c1a28ecda5b49 100644 --- 
a/Lib/test/test_unittest/test_assertions.py +++ b/Lib/test/test_unittest/test_assertions.py @@ -273,9 +273,9 @@ def testAssertDictEqual(self): def testAssertMultiLineEqual(self): self.assertMessages('assertMultiLineEqual', ("", "foo"), - [r"\+ foo$", "^oops$", - r"\+ foo$", - r"\+ foo : oops$"]) + [r"\+ foo\n$", "^oops$", + r"\+ foo\n$", + r"\+ foo\n : oops$"]) def testAssertLess(self): self.assertMessages('assertLess', (2, 1), diff --git a/Lib/test/test_unittest/test_case.py b/Lib/test/test_unittest/test_case.py index dd5ff6d553e61d..ed5eb5609a5dd1 100644 --- a/Lib/test/test_unittest/test_case.py +++ b/Lib/test/test_unittest/test_case.py @@ -1149,6 +1149,66 @@ def testAssertEqualSingleLine(self): error = str(e).split('\n', 1)[1] self.assertEqual(sample_text_error, error) + def testAssertEqualwithEmptyString(self): + '''Verify when there is an empty string involved, the diff output + does not treat the empty string as a single empty line. It should + instead be handled as a non-line. + ''' + sample_text = '' + revised_sample_text = 'unladen swallows fly quickly' + sample_text_error = '''\ ++ unladen swallows fly quickly +''' + try: + self.assertEqual(sample_text, revised_sample_text) + except self.failureException as e: + # need to remove the first line of the error message + error = str(e).split('\n', 1)[1] + self.assertEqual(sample_text_error, error) + + def testAssertEqualMultipleLinesMissingNewlineTerminator(self): + '''Verifying format of diff output from assertEqual involving strings + with multiple lines, but missing the terminating newline on both. + ''' + sample_text = 'laden swallows\nfly sloely' + revised_sample_text = 'laden swallows\nfly slowly' + sample_text_error = '''\ + laden swallows +- fly sloely +? ^ ++ fly slowly +? ^ +''' + try: + self.assertEqual(sample_text, revised_sample_text) + except self.failureException as e: + # need to remove the first line of the error message + error = str(e).split('\n', 1)[1] + self.assertEqual(sample_text_error, error) + + def testAssertEqualMultipleLinesMismatchedNewlinesTerminators(self): + '''Verifying format of diff output from assertEqual involving strings + with multiple lines and mismatched newlines. The output should + include a - on it's own line to indicate the newline difference + between the two strings + ''' + sample_text = 'laden swallows\nfly sloely\n' + revised_sample_text = 'laden swallows\nfly slowly' + sample_text_error = '''\ + laden swallows +- fly sloely +? ^ ++ fly slowly +? 
^ +-\x20 +''' + try: + self.assertEqual(sample_text, revised_sample_text) + except self.failureException as e: + # need to remove the first line of the error message + error = str(e).split('\n', 1)[1] + self.assertEqual(sample_text_error, error) + def testEqualityBytesWarning(self): if sys.flags.bytes_warning: def bytes_warning(): diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 80fb9e5cd2a445..dcdbb1cc64fd28 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -72,20 +72,20 @@ class UrlParseTestCase(unittest.TestCase): def checkRoundtrips(self, url, parsed, split): result = urllib.parse.urlparse(url) - self.assertEqual(result, parsed) + self.assertSequenceEqual(result, parsed) t = (result.scheme, result.netloc, result.path, result.params, result.query, result.fragment) - self.assertEqual(t, parsed) + self.assertSequenceEqual(t, parsed) # put it back together and it should be the same result2 = urllib.parse.urlunparse(result) - self.assertEqual(result2, url) - self.assertEqual(result2, result.geturl()) + self.assertSequenceEqual(result2, url) + self.assertSequenceEqual(result2, result.geturl()) # the result of geturl() is a fixpoint; we can always parse it # again to get the same result: result3 = urllib.parse.urlparse(result.geturl()) self.assertEqual(result3.geturl(), result.geturl()) - self.assertEqual(result3, result) + self.assertSequenceEqual(result3, result) self.assertEqual(result3.scheme, result.scheme) self.assertEqual(result3.netloc, result.netloc) self.assertEqual(result3.path, result.path) @@ -99,18 +99,18 @@ def checkRoundtrips(self, url, parsed, split): # check the roundtrip using urlsplit() as well result = urllib.parse.urlsplit(url) - self.assertEqual(result, split) + self.assertSequenceEqual(result, split) t = (result.scheme, result.netloc, result.path, result.query, result.fragment) - self.assertEqual(t, split) + self.assertSequenceEqual(t, split) result2 = urllib.parse.urlunsplit(result) - self.assertEqual(result2, url) - self.assertEqual(result2, result.geturl()) + self.assertSequenceEqual(result2, url) + self.assertSequenceEqual(result2, result.geturl()) # check the fixpoint property of re-parsing the result of geturl() result3 = urllib.parse.urlsplit(result.geturl()) self.assertEqual(result3.geturl(), result.geturl()) - self.assertEqual(result3, result) + self.assertSequenceEqual(result3, result) self.assertEqual(result3.scheme, result.scheme) self.assertEqual(result3.netloc, result.netloc) self.assertEqual(result3.path, result.path) @@ -162,10 +162,15 @@ def test_roundtrips(self): ('svn+ssh', 'svn.zope.org', '/repos/main/ZConfig/trunk/', '', '')), ('git+ssh://git@github.com/user/project.git', - ('git+ssh', 'git@github.com','/user/project.git', - '','',''), - ('git+ssh', 'git@github.com','/user/project.git', - '', '')), + ('git+ssh', 'git@github.com','/user/project.git', + '','',''), + ('git+ssh', 'git@github.com','/user/project.git', + '', '')), + ('itms-services://?action=download-manifest&url=https://example.com/app', + ('itms-services', '', '', '', + 'action=download-manifest&url=https://example.com/app', ''), + ('itms-services', '', '', + 'action=download-manifest&url=https://example.com/app', '')), ] def _encode(t): return (t[0].encode('ascii'), diff --git a/Lib/test/test_uu.py b/Lib/test/test_uu.py index 0493aae4fc67be..a189d6bc4b05d3 100644 --- a/Lib/test/test_uu.py +++ b/Lib/test/test_uu.py @@ -147,6 +147,34 @@ def test_newlines_escaped(self): uu.encode(inp, out, filename) self.assertIn(safefilename, 
out.getvalue()) + def test_no_directory_traversal(self): + relative_bad = b"""\ +begin 644 ../../../../../../../../tmp/test1 +$86)C"@`` +` +end +""" + with self.assertRaisesRegex(uu.Error, 'directory'): + uu.decode(io.BytesIO(relative_bad)) + if os.altsep: + relative_bad_bs = relative_bad.replace(b'/', b'\\') + with self.assertRaisesRegex(uu.Error, 'directory'): + uu.decode(io.BytesIO(relative_bad_bs)) + + absolute_bad = b"""\ +begin 644 /tmp/test2 +$86)C"@`` +` +end +""" + with self.assertRaisesRegex(uu.Error, 'directory'): + uu.decode(io.BytesIO(absolute_bad)) + if os.altsep: + absolute_bad_bs = absolute_bad.replace(b'/', b'\\') + with self.assertRaisesRegex(uu.Error, 'directory'): + uu.decode(io.BytesIO(absolute_bad_bs)) + + class UUStdIOTest(unittest.TestCase): def setUp(self): diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py index 52d43bdead67f8..14c19719e260c4 100644 --- a/Lib/test/test_zipimport.py +++ b/Lib/test/test_zipimport.py @@ -836,7 +836,6 @@ def _testBogusZipFile(self): self.assertRaises(TypeError, z.get_source, None) error = zipimport.ZipImportError - self.assertIsNone(z.find_module('abc')) self.assertIsNone(z.find_spec('abc')) with warnings.catch_warnings(): diff --git a/Lib/traceback.py b/Lib/traceback.py index ba4a9ffd001b53..419f6e81b5e1be 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -179,12 +179,12 @@ def _safe_string(value, what, func=str): # -- def print_exc(limit=None, file=None, chain=True): - """Shorthand for 'print_exception(*sys.exc_info(), limit, file, chain)'.""" - print_exception(*sys.exc_info(), limit=limit, file=file, chain=chain) + """Shorthand for 'print_exception(sys.exception(), limit, file, chain)'.""" + print_exception(sys.exception(), limit=limit, file=file, chain=chain) def format_exc(limit=None, chain=True): """Like print_exc() but return a string.""" - return "".join(format_exception(*sys.exc_info(), limit=limit, chain=chain)) + return "".join(format_exception(sys.exception(), limit=limit, chain=chain)) def print_last(limit=None, file=None, chain=True): """This is a shorthand for 'print_exception(sys.last_exc, limit, file, chain)'.""" diff --git a/Lib/turtle.py b/Lib/turtle.py index 2de406e0f517af..cf111158b7c149 100644 --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -21,7 +21,6 @@ # misrepresented as being the original software. # 3. This notice may not be removed or altered from any source distribution. - """ Turtle graphics is a popular way for introducing programming to kids. It was part of the original Logo programming language developed @@ -97,13 +96,8 @@ Behind the scenes there are some features included with possible extensions in mind. These will be commented and documented elsewhere. - """ -_ver = "turtle 1.1b- - for Python 3.1 - 4. 5. 2009" - -# print(_ver) - import tkinter as TK import types import math @@ -141,7 +135,7 @@ _tg_utilities = ['write_docstringdict', 'done'] __all__ = (_tg_classes + _tg_screen_functions + _tg_turtle_functions + - _tg_utilities + ['Terminator']) # + _math_functions) + _tg_utilities + ['Terminator']) _alias_list = ['addshape', 'backward', 'bk', 'fd', 'ht', 'lt', 'pd', 'pos', 'pu', 'rt', 'seth', 'setpos', 'setposition', 'st', @@ -598,9 +592,6 @@ def _write(self, pos, txt, align, font, pencolor): x0, y0, x1, y1 = self.cv.bbox(item) return item, x1-1 -## def _dot(self, pos, size, color): -## """may be implemented for some other graphics toolkit""" - def _onclick(self, item, fun, num=1, add=None): """Bind fun to mouse-click event on turtle. 
fun must be a function with two arguments, the coordinates @@ -2726,7 +2717,7 @@ def _cc(self, args): if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)): raise TurtleGraphicsError("bad color sequence: %s" % str(args)) return "#%02x%02x%02x" % (r, g, b) - + def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: """Instantly move turtle to an absolute position. @@ -2738,14 +2729,14 @@ def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: call: teleport(x, y) # two coordinates --or: teleport(x) # teleport to x position, keeping y as is --or: teleport(y=y) # teleport to y position, keeping x as is - --or: teleport(x, y, fill_gap=True) + --or: teleport(x, y, fill_gap=True) # teleport but fill the gap in between Move turtle to an absolute position. Unlike goto(x, y), a line will not be drawn. The turtle's orientation does not change. If currently filling, the polygon(s) teleported from will be filled after leaving, and filling will begin again after teleporting. This can be disabled - with fill_gap=True, which makes the imaginary line traveled during + with fill_gap=True, which makes the imaginary line traveled during teleporting act as a fill barrier like in goto(x, y). Example (for a Turtle instance named turtle): @@ -2773,7 +2764,7 @@ def teleport(self, x=None, y=None, *, fill_gap: bool = False) -> None: self._position = Vec2D(new_x, new_y) self.pen(pendown=pendown) if was_filling and not fill_gap: - self.begin_fill() + self.begin_fill() def clone(self): """Create and return a clone of the turtle. @@ -3455,27 +3446,22 @@ def dot(self, size=None, *color): if size is None: size = self._pensize + max(self._pensize, 4) color = self._colorstr(color) - if hasattr(self.screen, "_dot"): - item = self.screen._dot(self._position, size, color) - self.items.append(item) - if self.undobuffer: - self.undobuffer.push(("dot", item)) - else: - pen = self.pen() - if self.undobuffer: - self.undobuffer.push(["seq"]) - self.undobuffer.cumulate = True - try: - if self.resizemode() == 'auto': - self.ht() - self.pendown() - self.pensize(size) - self.pencolor(color) - self.forward(0) - finally: - self.pen(pen) - if self.undobuffer: - self.undobuffer.cumulate = False + # If screen were to gain a dot function, see GH #104218. 
+ pen = self.pen() + if self.undobuffer: + self.undobuffer.push(["seq"]) + self.undobuffer.cumulate = True + try: + if self.resizemode() == 'auto': + self.ht() + self.pendown() + self.pensize(size) + self.pencolor(color) + self.forward(0) + finally: + self.pen(pen) + if self.undobuffer: + self.undobuffer.cumulate = False def _write(self, txt, align, font): """Performs the writing for write() @@ -3751,11 +3737,6 @@ class _Screen(TurtleScreen): _title = _CFG["title"] def __init__(self): - # XXX there is no need for this code to be conditional, - # as there will be only a single _Screen instance, anyway - # XXX actually, the turtle demo is injecting root window, - # so perhaps the conditional creation of a root should be - # preserved (perhaps by passing it as an optional parameter) if _Screen._root is None: _Screen._root = self._root = _Root() self._root.title(_Screen._title) diff --git a/Lib/turtledemo/__main__.py b/Lib/turtledemo/__main__.py index caea022da4a688..f6c9d6aa6f9a32 100755 --- a/Lib/turtledemo/__main__.py +++ b/Lib/turtledemo/__main__.py @@ -203,10 +203,10 @@ def __init__(self, filename=None): def onResize(self, event): - cwidth = self._canvas.winfo_width() - cheight = self._canvas.winfo_height() - self._canvas.xview_moveto(0.5*(self.canvwidth-cwidth)/self.canvwidth) - self._canvas.yview_moveto(0.5*(self.canvheight-cheight)/self.canvheight) + cwidth = self.canvas.winfo_width() + cheight = self.canvas.winfo_height() + self.canvas.xview_moveto(0.5*(self.canvwidth-cwidth)/self.canvwidth) + self.canvas.yview_moveto(0.5*(self.canvheight-cheight)/self.canvheight) def makeTextFrame(self, root): self.text_frame = text_frame = Frame(root) @@ -237,19 +237,23 @@ def makeTextFrame(self, root): return text_frame def makeGraphFrame(self, root): + # t._Screen is a singleton class instantiated or retrieved + # by calling Screen. Since tdemo canvas needs a different + # configuration, we manually set class attributes before + # calling Screen and manually call superclass init after. turtle._Screen._root = root + self.canvwidth = 1000 self.canvheight = 800 - turtle._Screen._canvas = self._canvas = canvas = turtle.ScrolledCanvas( + turtle._Screen._canvas = self.canvas = canvas = turtle.ScrolledCanvas( root, 800, 600, self.canvwidth, self.canvheight) canvas.adjustScrolls() canvas._rootwindow.bind('', self.onResize) canvas._canvas['borderwidth'] = 0 - self.screen = _s_ = turtle.Screen() - turtle.TurtleScreen.__init__(_s_, _s_._canvas) - self.scanvas = _s_._canvas - turtle.RawTurtle.screens = [_s_] + self.screen = screen = turtle.Screen() + turtle.TurtleScreen.__init__(screen, canvas) + turtle.RawTurtle.screens = [screen] return canvas def set_txtsize(self, size): @@ -373,7 +377,7 @@ def startDemo(self): def clearCanvas(self): self.refreshCanvas() self.screen._delete("all") - self.scanvas.config(cursor="") + self.canvas.config(cursor="") self.configGUI(NORMAL, DISABLED, DISABLED) def stopIt(self): diff --git a/Lib/typing.py b/Lib/typing.py index 354bc80eb3abfa..0dacdd9031a776 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -1753,6 +1753,17 @@ class Bar(Generic[Unpack[Ts]]): ... Foo[*tuple[int, str]] class Bar(Generic[*Ts]): ... + The operator can also be used along with a `TypedDict` to annotate + `**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... 
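# A runnable sketch of the Unpack/TypedDict pattern described above; the
# Movie and foo names mirror the docstring example and are illustrative only.
from typing import TypedDict, Unpack

class Movie(TypedDict):
    name: str
    year: int

def foo(**kwargs: Unpack[Movie]) -> None:
    print(kwargs)

foo(name="Blade Runner", year=1982)   # fine for a static type checker
foo(name="Blade Runner")              # flagged by a type checker ('year' missing),
                                      # but not rejected at runtime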
+ Note that there is only some runtime checking of this operator. Not everything the runtime allows may be accepted by static type checkers. @@ -1767,7 +1778,7 @@ class _UnpackGenericAlias(_GenericAlias, _root=True): def __repr__(self): # `Unpack` only takes one argument, so __args__ should contain only # a single item. - return '*' + repr(self.__args__[0]) + return f'typing.Unpack[{_type_repr(self.__args__[0])}]' def __getitem__(self, args): if self.__typing_is_unpacked_typevartuple__: @@ -2308,15 +2319,16 @@ def cast(typ, val): def assert_type(val, typ, /): """Ask a static type checker to confirm that the value is of the given type. - When the type checker encounters a call to assert_type(), it + At runtime this does nothing: it returns the first argument unchanged with no + checks or side effects, no matter the actual type of the argument. + + When a static type checker encounters a call to assert_type(), it emits an error if the value is not of the specified type:: def greet(name: str) -> None: assert_type(name, str) # ok assert_type(name, int) # type checker error - At runtime this returns the first argument unchanged and otherwise - does nothing. """ return val diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index 018f22e7ce0c73..001b640dc43ad6 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -1217,19 +1217,34 @@ def assertCountEqual(self, first, second, msg=None): def assertMultiLineEqual(self, first, second, msg=None): """Assert that two multi-line strings are equal.""" - self.assertIsInstance(first, str, 'First argument is not a string') - self.assertIsInstance(second, str, 'Second argument is not a string') + self.assertIsInstance(first, str, "First argument is not a string") + self.assertIsInstance(second, str, "Second argument is not a string") if first != second: - # don't use difflib if the strings are too long + # Don't use difflib if the strings are too long if (len(first) > self._diffThreshold or len(second) > self._diffThreshold): self._baseAssertEqual(first, second, msg) - firstlines = first.splitlines(keepends=True) - secondlines = second.splitlines(keepends=True) - if len(firstlines) == 1 and first.strip('\r\n') == first: - firstlines = [first + '\n'] - secondlines = [second + '\n'] + + # Append \n to both strings if either is missing the \n. + # This allows the final ndiff to show the \n difference. 
The + # exception here is if the string is empty, in which case no + # \n should be added + first_presplit = first + second_presplit = second + if first and second: + if first[-1] != '\n' or second[-1] != '\n': + first_presplit += '\n' + second_presplit += '\n' + elif second and second[-1] != '\n': + second_presplit += '\n' + elif first and first[-1] != '\n': + first_presplit += '\n' + + firstlines = first_presplit.splitlines(keepends=True) + secondlines = second_presplit.splitlines(keepends=True) + + # Generate the message and diff, then raise the exception standardMsg = '%s != %s' % _common_shorten_repr(first, second) diff = '\n' + ''.join(difflib.ndiff(firstlines, secondlines)) standardMsg = self._truncateMessage(standardMsg, diff) diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index 5f95c5ff7f9c1c..777b7c53efe565 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -54,7 +54,7 @@ 'imap', 'wais', 'file', 'mms', 'https', 'shttp', 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', 'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh', - 'ws', 'wss'] + 'ws', 'wss', 'itms-services'] uses_params = ['', 'ftp', 'hdl', 'prospero', 'http', 'imap', 'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips', diff --git a/Lib/uu.py b/Lib/uu.py old mode 100755 new mode 100644 index 6f8805d8c5d0c6..26bb59ae073ec5 --- a/Lib/uu.py +++ b/Lib/uu.py @@ -133,7 +133,14 @@ def decode(in_file, out_file=None, mode=None, quiet=False): # If the filename isn't ASCII, what's up with that?!? out_file = hdrfields[2].rstrip(b' \t\r\n\f').decode("ascii") if os.path.exists(out_file): - raise Error('Cannot overwrite existing file: %s' % out_file) + raise Error(f'Cannot overwrite existing file: {out_file}') + if (out_file.startswith(os.sep) or + f'..{os.sep}' in out_file or ( + os.altsep and + (out_file.startswith(os.altsep) or + f'..{os.altsep}' in out_file)) + ): + raise Error(f'Refusing to write to {out_file} due to directory traversal') if mode is None: mode = int(hdrfields[1], 8) # diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 63fa21b2b33d17..2f5937489ac03d 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -359,9 +359,9 @@ def library_recipes(): ), ), dict( - name="SQLite 3.40.1", - url="https://sqlite.org/2022/sqlite-autoconf-3400100.tar.gz", - checksum="42175b1a1d23529cb133bbd2b5900afd", + name="SQLite 3.41.2", + url="https://sqlite.org/2023/sqlite-autoconf-3410200.tar.gz", + checksum="862075fd1c38324878ef809eda39edfe", extra_cflags=('-Os ' '-DSQLITE_ENABLE_FTS5 ' '-DSQLITE_ENABLE_FTS4 ' diff --git a/Makefile.pre.in b/Makefile.pre.in index b285ef9e832db5..329466580b9cb0 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1376,9 +1376,11 @@ regen-opcode: $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_opcode_h.py \ $(srcdir)/Lib/opcode.py \ $(srcdir)/Include/opcode.h.new \ - $(srcdir)/Include/internal/pycore_opcode.h.new + $(srcdir)/Include/internal/pycore_opcode.h.new \ + $(srcdir)/Include/internal/pycore_intrinsics.h.new $(UPDATE_FILE) $(srcdir)/Include/opcode.h $(srcdir)/Include/opcode.h.new $(UPDATE_FILE) $(srcdir)/Include/internal/pycore_opcode.h $(srcdir)/Include/internal/pycore_opcode.h.new + $(UPDATE_FILE) $(srcdir)/Include/internal/pycore_intrinsics.h $(srcdir)/Include/internal/pycore_intrinsics.h.new .PHONY: regen-token regen-token: @@ -2696,7 +2698,7 @@ MODULE__IO_DEPS=$(srcdir)/Modules/_io/_iomodule.h MODULE__MD5_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_HEADERS) Modules/_hacl/Hacl_Hash_MD5.h 
Modules/_hacl/Hacl_Hash_MD5.c MODULE__SHA1_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_HEADERS) Modules/_hacl/Hacl_Hash_SHA1.h Modules/_hacl/Hacl_Hash_SHA1.c MODULE__SHA2_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_SHA2_HEADERS) $(LIBHACL_SHA2_A) -MODULE__SHA3_DEPS=$(srcdir)/Modules/_sha3/sha3.c $(srcdir)/Modules/_sha3/sha3.h $(srcdir)/Modules/hashlib.h +MODULE__SHA3_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_HEADERS) Modules/_hacl/Hacl_Hash_SHA3.h Modules/_hacl/Hacl_Hash_SHA3.c MODULE__SOCKET_DEPS=$(srcdir)/Modules/socketmodule.h $(srcdir)/Modules/addrinfo.h $(srcdir)/Modules/getaddrinfo.c $(srcdir)/Modules/getnameinfo.c MODULE__SSL_DEPS=$(srcdir)/Modules/_ssl.h $(srcdir)/Modules/_ssl/cert.c $(srcdir)/Modules/_ssl/debughelpers.c $(srcdir)/Modules/_ssl/misc.c $(srcdir)/Modules/_ssl_data.h $(srcdir)/Modules/_ssl_data_111.h $(srcdir)/Modules/_ssl_data_300.h $(srcdir)/Modules/socketmodule.h MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/_testcapi/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h diff --git a/Misc/NEWS.d/3.10.0a5.rst b/Misc/NEWS.d/3.10.0a5.rst index 1c7c7447cae065..497e3849171831 100644 --- a/Misc/NEWS.d/3.10.0a5.rst +++ b/Misc/NEWS.d/3.10.0a5.rst @@ -499,7 +499,7 @@ Araujo. .. nonce: HY2beA .. section: Documentation -Updated importlib.utils.resolve_name() doc to use __spec__.parent instead of +Updated importlib.util.resolve_name() doc to use __spec__.parent instead of __package__. (Thanks Yair Frid.) .. diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst index 075e8da825a331..ff5064f89d8dd8 100644 --- a/Misc/NEWS.d/3.12.0a1.rst +++ b/Misc/NEWS.d/3.12.0a1.rst @@ -2028,8 +2028,8 @@ resources. .. nonce: NzdREm .. section: Library -Remove deprecated :func:`importlib.utils.set_loader` and -:func:`importlib.utils.module_for_loader` from :mod:`importlib.utils`. +Remove deprecated :func:`!importlib.util.set_loader` and +:func:`!importlib.util.module_for_loader` from :mod:`importlib.util`. .. diff --git a/Misc/NEWS.d/3.7.0a1.rst b/Misc/NEWS.d/3.7.0a1.rst index 9bada1b76be7a8..ef93454784b77f 100644 --- a/Misc/NEWS.d/3.7.0a1.rst +++ b/Misc/NEWS.d/3.7.0a1.rst @@ -6255,7 +6255,7 @@ Fix python-gdb.py didn't support new dict implementation. .. section: Tools/Demos The pybench and pystone microbenchmark have been removed from Tools. Please -use the new Python benchmark suite https://github.com/python/performance +use the new Python benchmark suite https://github.com/python/pyperformance which is more reliable and includes a portable version of pybench working on Python 2 and Python 3. diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index db2eba32e6ea34..854458f2d1a994 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -3818,7 +3818,7 @@ user. .. section: Library The :2to3fixer:`reload` fixer now uses :func:`importlib.reload` instead of -deprecated :func:`imp.reload`. +deprecated :func:`!imp.reload`. .. diff --git a/Misc/NEWS.d/next/Build/2022-06-20-15-15-11.gh-issue-90656.kFBbKe.rst b/Misc/NEWS.d/next/Build/2022-06-20-15-15-11.gh-issue-90656.kFBbKe.rst new file mode 100644 index 00000000000000..dfe71a5552070d --- /dev/null +++ b/Misc/NEWS.d/next/Build/2022-06-20-15-15-11.gh-issue-90656.kFBbKe.rst @@ -0,0 +1,7 @@ +Add platform triplets for 64-bit LoongArch: + +* loongarch64-linux-gnusf +* loongarch64-linux-gnuf32 +* loongarch64-linux-gnu + +Patch by Zhang Na. 
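The :func:`importlib.util.resolve_name` documentation fix above recommends ``__spec__.parent`` rather than ``__package__`` as the *package* argument; a small sketch of the resolution itself, using made-up package names::

    import importlib.util

    # Pure string resolution; nothing is imported here.
    print(importlib.util.resolve_name(".helpers", "pkg"))        # pkg.helpers
    print(importlib.util.resolve_name("..utils", "pkg.subpkg"))  # pkg.utils

    # Inside a module, the anchor would typically be __spec__.parent, e.g.
    # importlib.util.resolve_name("..utils", __spec__.parent).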
diff --git a/Misc/NEWS.d/next/Build/2023-05-04-10-56-14.gh-issue-104106.-W9BJS.rst b/Misc/NEWS.d/next/Build/2023-05-04-10-56-14.gh-issue-104106.-W9BJS.rst new file mode 100644 index 00000000000000..900e5bd61d6033 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-05-04-10-56-14.gh-issue-104106.-W9BJS.rst @@ -0,0 +1 @@ +Add gcc fallback of mkfifoat/mknodat for macOS. Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/C API/2022-09-15-15-21-34.gh-issue-96803.ynBKIS.rst b/Misc/NEWS.d/next/C API/2022-09-15-15-21-34.gh-issue-96803.ynBKIS.rst new file mode 100644 index 00000000000000..6fc56d2249f581 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2022-09-15-15-21-34.gh-issue-96803.ynBKIS.rst @@ -0,0 +1,6 @@ +Add unstable C-API functions to get the code object, lasti and line number from +the internal ``_PyInterpreterFrame`` in the limited API. The functions are: + +* ``PyCodeObject * PyUnstable_InterpreterFrame_GetCode(struct _PyInterpreterFrame *frame)`` +* ``int PyUnstable_InterpreterFrame_GetLasti(struct _PyInterpreterFrame *frame)`` +* ``int PyUnstable_InterpreterFrame_GetLine(struct _PyInterpreterFrame *frame)`` diff --git a/Misc/NEWS.d/next/C API/2023-04-13-16-54-00.gh-issue-103509.A26Qu8.rst b/Misc/NEWS.d/next/C API/2023-04-13-16-54-00.gh-issue-103509.A26Qu8.rst new file mode 100644 index 00000000000000..af630c3aafa940 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-04-13-16-54-00.gh-issue-103509.A26Qu8.rst @@ -0,0 +1,5 @@ +Added C API for extending types whose instance memory layout is opaque: +:c:member:`PyType_Spec.basicsize` can now be zero or negative, +:c:func:`PyObject_GetTypeData` can be used to get subclass-specific data, +and :c:macro:`Py_TPFLAGS_ITEMS_AT_END` can be used to safely extend +variable-size objects. See :pep:`697` for details. diff --git a/Misc/NEWS.d/next/C API/2023-04-24-10-31-59.gh-issue-103743.2xYA1K.rst b/Misc/NEWS.d/next/C API/2023-04-24-10-31-59.gh-issue-103743.2xYA1K.rst new file mode 100644 index 00000000000000..d074350ed3ebbe --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-04-24-10-31-59.gh-issue-103743.2xYA1K.rst @@ -0,0 +1,2 @@ +Add :c:func:`PyUnstable_Object_GC_NewWithExtraData` function that can be used to +allocate additional memory after an object for data not managed by Python. diff --git a/Misc/NEWS.d/next/C API/2023-04-28-18-04-38.gh-issue-103968.EnVvOx.rst b/Misc/NEWS.d/next/C API/2023-04-28-18-04-38.gh-issue-103968.EnVvOx.rst new file mode 100644 index 00000000000000..5e4270f82afd84 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-04-28-18-04-38.gh-issue-103968.EnVvOx.rst @@ -0,0 +1,4 @@ +:c:func:`PyType_FromSpec` and its variants now allow creating classes whose +metaclass overrides :c:member:`~PyTypeObject.tp_new`. The ``tp_new`` is +ignored. This behavior is deprecated and will be disallowed in 3.14+. The +new :c:func:`PyType_FromMetaclass` already disallows it. diff --git a/Misc/NEWS.d/next/C API/2023-05-02-21-05-54.gh-issue-104109.0tnDZV.rst b/Misc/NEWS.d/next/C API/2023-05-02-21-05-54.gh-issue-104109.0tnDZV.rst new file mode 100644 index 00000000000000..2ffc0fa81c014a --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-05-02-21-05-54.gh-issue-104109.0tnDZV.rst @@ -0,0 +1,5 @@ +We've added ``Py_NewInterpreterFromConfig()`` and ``PyInterpreterConfig`` to +the public C-API (but not the stable ABI; not yet at least). The new +function may be used to create a new interpreter with various features +configured. The function was added to support PEP 684 (per-interpreter +GIL). 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-01-30-15-40-29.gh-issue-97933.nUlp3r.rst b/Misc/NEWS.d/next/Core and Builtins/2023-01-30-15-40-29.gh-issue-97933.nUlp3r.rst new file mode 100644 index 00000000000000..2eec05cb3ace5c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-01-30-15-40-29.gh-issue-97933.nUlp3r.rst @@ -0,0 +1,2 @@ +:pep:`709`: inline list, dict and set comprehensions to improve performance +and reduce bytecode size. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-03-07-17-37-00.gh-issue-102500.RUSQhz.rst b/Misc/NEWS.d/next/Core and Builtins/2023-03-07-17-37-00.gh-issue-102500.RUSQhz.rst new file mode 100644 index 00000000000000..e03113ba05cd7d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-03-07-17-37-00.gh-issue-102500.RUSQhz.rst @@ -0,0 +1,3 @@ +Make the buffer protocol accessible in Python code using the new +``__buffer__`` and ``__release_buffer__`` magic methods. See :pep:`688` for +details. Patch by Jelle Zijlstra. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-12-19-55-24.gh-issue-82012.FlcJAh.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-19-55-24.gh-issue-82012.FlcJAh.rst new file mode 100644 index 00000000000000..819a2359bf6fae --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-19-55-24.gh-issue-82012.FlcJAh.rst @@ -0,0 +1,5 @@ +The bitwise inversion operator (``~``) on bool is deprecated. +It returns the bitwise inversion of the underlying ``int`` representation such that +``bool(~True) == True``, which can be confusing. Use ``not`` for logical negation +of bools. In the rare case that you really need the bitwise inversion of the underlying ``int``, +convert to int explicitly ``~int(x)``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-20-16-17-51.gh-issue-103650.K1MFXR.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-20-16-17-51.gh-issue-103650.K1MFXR.rst new file mode 100644 index 00000000000000..5434660e9d6ffb --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-20-16-17-51.gh-issue-103650.K1MFXR.rst @@ -0,0 +1 @@ +Change the perf map format to remove the '0x' prefix from the addresses diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-25-20-56-01.gh-issue-103845.V7NYFn.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-25-20-56-01.gh-issue-103845.V7NYFn.rst new file mode 100644 index 00000000000000..e8434854cde632 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-25-20-56-01.gh-issue-103845.V7NYFn.rst @@ -0,0 +1 @@ +Remove both line and instruction instrumentation before adding new ones for monitoring, to avoid newly added instrumentation being removed immediately. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-26-15-14-23.gh-issue-103899.1pqKPF.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-26-15-14-23.gh-issue-103899.1pqKPF.rst new file mode 100644 index 00000000000000..c12a6b9cb841f2 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-26-15-14-23.gh-issue-103899.1pqKPF.rst @@ -0,0 +1,3 @@ +Provide a helpful hint in the :exc:`TypeError` message when accidentally +calling a :term:`module` object that has a callable attribute of the same +name (such as :func:`dis.dis` or :class:`datetime.datetime`). 
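The gh-103899 change above is easiest to see with the classic ``datetime`` mistake; a small, illustrative sketch (the exact hint wording may differ)::

    import datetime

    try:
        datetime(2023, 5, 8)          # calling the module, not datetime.datetime
    except TypeError as exc:
        print(exc)                    # "'module' object is not callable", now with a
                                      # hint that datetime.datetime(...) was likely meant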
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-01-12-03-52.gh-issue-104018.PFxGS4.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-12-03-52.gh-issue-104018.PFxGS4.rst new file mode 100644 index 00000000000000..f3cadaee0e32d9 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-12-03-52.gh-issue-104018.PFxGS4.rst @@ -0,0 +1 @@ +Disallow the "z" format specifier in %-format of bytes objects. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-01-14-48-29.gh-issue-104066.pzoUZQ.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-14-48-29.gh-issue-104066.pzoUZQ.rst new file mode 100644 index 00000000000000..97e0c01689cb6f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-14-48-29.gh-issue-104066.pzoUZQ.rst @@ -0,0 +1,2 @@ +Improve the performance of :func:`hasattr` for module objects with a missing +attribute. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-01-21-05-47.gh-issue-104078.vRaBsU.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-21-05-47.gh-issue-104078.vRaBsU.rst new file mode 100644 index 00000000000000..6f24529bac3e0c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-01-21-05-47.gh-issue-104078.vRaBsU.rst @@ -0,0 +1 @@ +Improve the performance of :c:func:`PyObject_HasAttrString` diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-02-18-29-49.gh-issue-104142._5Et6I.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-02-18-29-49.gh-issue-104142._5Et6I.rst new file mode 100644 index 00000000000000..6a19ae84057f4c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-02-18-29-49.gh-issue-104142._5Et6I.rst @@ -0,0 +1,2 @@ +Fix an issue where :class:`list` or :class:`tuple` repetition could fail to +respect :pep:`683`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-03-17-46-47.gh-issue-104108.GOxAYt.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-03-17-46-47.gh-issue-104108.GOxAYt.rst new file mode 100644 index 00000000000000..dad843636493ae --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-03-17-46-47.gh-issue-104108.GOxAYt.rst @@ -0,0 +1,6 @@ +Multi-phase init extension modules may now indicate whether or not they +actually support multiple interpreters. By default such modules are +expected to support use in multiple interpreters. In the uncommon case that +one does not, it may use the new ``Py_mod_multiple_interpreters`` module def +slot. A value of ``0`` means the module does not support them. ``1`` means +it does. The default is ``1``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-05-12-14-47.gh-issue-99113.-RAdnv.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-05-12-14-47.gh-issue-99113.-RAdnv.rst new file mode 100644 index 00000000000000..42e26cb27b6e01 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-05-12-14-47.gh-issue-99113.-RAdnv.rst @@ -0,0 +1,6 @@ +The GIL is now (optionally) per-interpreter. This is the fundamental change +for PEP 684. This is all made possible by virtue of the isolated state of +each interpreter in the process. The behavior of the main interpreter +remains unchanged. Likewise, interpreters created using +``Py_NewInterpreter()`` are not affected. To get an interpreter with its +own GIL, call ``Py_NewInterpreterFromConfig()``. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-05-13-18-56.gh-issue-99113.hT1ajK.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-05-13-18-56.gh-issue-99113.hT1ajK.rst new file mode 100644 index 00000000000000..afd26750846167 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-05-13-18-56.gh-issue-99113.hT1ajK.rst @@ -0,0 +1,11 @@ +Multi-phase init extension modules may now indicate that they support +running in subinterpreters that have their own GIL. This is done by using +``Py_MOD_PER_INTERPRETER_GIL_SUPPORTED`` as the value for the +``Py_mod_multiple_interpreters`` module def slot. Otherwise the module, by +default, cannot be imported in such subinterpreters. (This does not affect +the main interpreter or subinterpreters that do not have their own GIL.) In +addition to the isolation that multi-phase init already normally requires, +support for per-interpreter GIL involves one additional constraint: +thread-safety. If the module has external (linked) dependencies and those +libraries have any state that isn't thread-safe then the module must do the +additional work to add thread-safety. This should be an uncommon case. diff --git a/Misc/NEWS.d/next/Library/2021-05-16-14-28-30.bpo-24964.Oa5Ie_.rst b/Misc/NEWS.d/next/Library/2021-05-16-14-28-30.bpo-24964.Oa5Ie_.rst new file mode 100644 index 00000000000000..ba113673b7fbe5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2021-05-16-14-28-30.bpo-24964.Oa5Ie_.rst @@ -0,0 +1,3 @@ +Added attribute '_proxy_response_headers' to HTTPConnection class. This +attribute contains the headers of the proxy server response to the CONNECT +request. diff --git a/Misc/NEWS.d/next/Library/2021-11-19-23-37-18.bpo-45606.UW5XE1.rst b/Misc/NEWS.d/next/Library/2021-11-19-23-37-18.bpo-45606.UW5XE1.rst new file mode 100644 index 00000000000000..531f4729220036 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2021-11-19-23-37-18.bpo-45606.UW5XE1.rst @@ -0,0 +1,5 @@ +Fixed the bug in :meth:`pathlib.Path.glob` -- previously a dangling symlink +would not be found by this method when the pattern is an exact match, but +would be found when the pattern contains a wildcard or the recursive +wildcard (``**``). With this change, a dangling symlink will be found in +both cases. diff --git a/Misc/NEWS.d/next/Library/2022-02-19-14-19-34.bpo-46797.6BXZX4.rst b/Misc/NEWS.d/next/Library/2022-02-19-14-19-34.bpo-46797.6BXZX4.rst new file mode 100644 index 00000000000000..6539efbc9d0eb0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-02-19-14-19-34.bpo-46797.6BXZX4.rst @@ -0,0 +1,4 @@ +Deprecation warnings are now emitted for :class:`!ast.Num`, +:class:`!ast.Bytes`, :class:`!ast.Str`, :class:`!ast.NameConstant` and +:class:`!ast.Ellipsis`. These have been documented as deprecated since Python +3.8, and will be removed in Python 3.14. diff --git a/Misc/NEWS.d/next/Library/2022-09-03-09-24-02.gh-issue-96534.EU4Oxv.rst b/Misc/NEWS.d/next/Library/2022-09-03-09-24-02.gh-issue-96534.EU4Oxv.rst new file mode 100644 index 00000000000000..0497d9eb69163e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-09-03-09-24-02.gh-issue-96534.EU4Oxv.rst @@ -0,0 +1 @@ +Support divert(4) added in FreeBSD 14. 
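A quick sketch of the bpo-45606 behaviour described above: a dangling symlink is now matched by an exact-match pattern, not only by wildcard patterns (creating symlinks may require extra privileges on Windows)::

    import pathlib, tempfile

    d = pathlib.Path(tempfile.mkdtemp())
    (d / "broken").symlink_to(d / "does-not-exist")   # dangling symlink

    print(list(d.glob("broken")))   # exact-match pattern now finds it
    print(list(d.glob("b*")))       # wildcard pattern found it before as well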
diff --git a/Misc/NEWS.d/next/Library/2022-10-09-14-47-42.gh-issue-98040.IN3qab.rst b/Misc/NEWS.d/next/Library/2022-10-09-14-47-42.gh-issue-98040.IN3qab.rst new file mode 100644 index 00000000000000..ac185406844153 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-10-09-14-47-42.gh-issue-98040.IN3qab.rst @@ -0,0 +1,2 @@ +Remove more deprecated importlib APIs: ``find_loader()``, ``find_module()``, +``importlib.abc.Finder``, ``pkgutil.ImpImporter``, ``pkgutil.ImpLoader``. diff --git a/Misc/NEWS.d/next/Library/2022-10-21-16-23-31.gh-issue-97850.N46coo.rst b/Misc/NEWS.d/next/Library/2022-10-21-16-23-31.gh-issue-97850.N46coo.rst new file mode 100644 index 00000000000000..e3297d164fff6d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-10-21-16-23-31.gh-issue-97850.N46coo.rst @@ -0,0 +1,2 @@ +Deprecate :func:`pkgutil.find_loader` and :func:`pkgutil.get_loader` +in favor of :func:`importlib.util.find_spec`. diff --git a/Misc/NEWS.d/next/Library/2023-01-22-14-53-12.gh-issue-89550.c1U23f.rst b/Misc/NEWS.d/next/Library/2023-01-22-14-53-12.gh-issue-89550.c1U23f.rst new file mode 100644 index 00000000000000..556db0eae00c0b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-01-22-14-53-12.gh-issue-89550.c1U23f.rst @@ -0,0 +1,2 @@ +Decrease execution time of some :mod:`gzip` file writes by 15% by +adding more appropriate buffering. diff --git a/Misc/NEWS.d/next/Library/2023-02-09-22-24-34.gh-issue-101640.oFuEpB.rst b/Misc/NEWS.d/next/Library/2023-02-09-22-24-34.gh-issue-101640.oFuEpB.rst new file mode 100644 index 00000000000000..917cf0f97b9e06 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-09-22-24-34.gh-issue-101640.oFuEpB.rst @@ -0,0 +1 @@ +:class:`argparse.ArgumentParser` now catches errors when writing messages, such as when :data:`sys.stderr` is ``None``. Patch by Oleg Iarygin. diff --git a/Misc/NEWS.d/next/Library/2023-03-08-02-45-46.gh-issue-91896.kgON_a.rst b/Misc/NEWS.d/next/Library/2023-03-08-02-45-46.gh-issue-91896.kgON_a.rst new file mode 100644 index 00000000000000..b5282d3d612916 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-08-02-45-46.gh-issue-91896.kgON_a.rst @@ -0,0 +1 @@ +Deprecate :class:`collections.abc.ByteString` diff --git a/Misc/NEWS.d/next/Library/2023-03-15-00-37-43.gh-issue-81079.heTAod.rst b/Misc/NEWS.d/next/Library/2023-03-15-00-37-43.gh-issue-81079.heTAod.rst new file mode 100644 index 00000000000000..ef5690533985d5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-15-00-37-43.gh-issue-81079.heTAod.rst @@ -0,0 +1,2 @@ +Add *case_sensitive* keyword-only argument to :meth:`pathlib.Path.glob` and +:meth:`~pathlib.Path.rglob`. diff --git a/Misc/NEWS.d/next/Library/2023-03-15-12-18-07.gh-issue-97696.DtnpIC.rst b/Misc/NEWS.d/next/Library/2023-03-15-12-18-07.gh-issue-97696.DtnpIC.rst new file mode 100644 index 00000000000000..0b3854d74eb991 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-15-12-18-07.gh-issue-97696.DtnpIC.rst @@ -0,0 +1,6 @@ +Implemented an eager task factory in asyncio. +When used as a task factory on an event loop, it performs eager execution of +coroutines. Coroutines that are able to complete synchronously (e.g. return or +raise without blocking) are returned immediately as a finished task, and the +task is never scheduled to the event loop. If the coroutine blocks, the +(pending) task is scheduled and returned. 
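A minimal sketch of the eager task factory described above, assuming it is exposed as ``asyncio.eager_task_factory``::

    import asyncio

    async def quick():
        return 42                    # completes without awaiting anything

    async def main():
        loop = asyncio.get_running_loop()
        loop.set_task_factory(asyncio.eager_task_factory)
        task = asyncio.create_task(quick())
        print(task.done())           # True: the coroutine finished eagerly and the
                                     # task was never scheduled on the event loop
        print(await task)            # 42

    asyncio.run(main())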
diff --git a/Misc/NEWS.d/next/Library/2023-04-03-22-02-35.gh-issue-100479.kNBjQm.rst b/Misc/NEWS.d/next/Library/2023-04-03-22-02-35.gh-issue-100479.kNBjQm.rst new file mode 100644 index 00000000000000..58db90480d2ff0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-03-22-02-35.gh-issue-100479.kNBjQm.rst @@ -0,0 +1,4 @@ +Add :meth:`pathlib.PurePath.with_segments`, which creates a path object from +arguments. This method is called whenever a derivative path is created, such +as from :attr:`pathlib.PurePath.parent`. Subclasses may override this method +to share information between path objects. diff --git a/Misc/NEWS.d/next/Library/2023-04-12-13-04-16.gh-issue-103472.C6bOHv.rst b/Misc/NEWS.d/next/Library/2023-04-12-13-04-16.gh-issue-103472.C6bOHv.rst new file mode 100644 index 00000000000000..01d84f024bd4a6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-12-13-04-16.gh-issue-103472.C6bOHv.rst @@ -0,0 +1,2 @@ +Avoid a potential :exc:`ResourceWarning` in :class:`http.client.HTTPConnection` +by closing the proxy / tunnel's CONNECT response explicitly. diff --git a/Misc/NEWS.d/next/Library/2023-04-13-19-43-15.gh-issue-103525.uY4VYg.rst b/Misc/NEWS.d/next/Library/2023-04-13-19-43-15.gh-issue-103525.uY4VYg.rst new file mode 100644 index 00000000000000..1414cb07dd9155 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-13-19-43-15.gh-issue-103525.uY4VYg.rst @@ -0,0 +1,2 @@ +Fix misleading exception message when mixed ``str`` and ``bytes`` arguments +are supplied to :class:`pathlib.PurePath` and :class:`~pathlib.Path`. diff --git a/Misc/NEWS.d/next/Library/2023-04-14-06-32-54.gh-issue-103533.n_AfcS.rst b/Misc/NEWS.d/next/Library/2023-04-14-06-32-54.gh-issue-103533.n_AfcS.rst new file mode 100644 index 00000000000000..1008ea076c71a0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-14-06-32-54.gh-issue-103533.n_AfcS.rst @@ -0,0 +1 @@ +Update :mod:`cProfile` to use PEP 669 API diff --git a/Misc/NEWS.d/next/Library/2023-04-14-21-16-05.gh-issue-103548.lagdpp.rst b/Misc/NEWS.d/next/Library/2023-04-14-21-16-05.gh-issue-103548.lagdpp.rst new file mode 100644 index 00000000000000..238f2868867472 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-14-21-16-05.gh-issue-103548.lagdpp.rst @@ -0,0 +1,4 @@ +Improve performance of :meth:`pathlib.Path.absolute` and +:meth:`~pathlib.Path.cwd` by joining paths only when necessary. Also improve +performance of :meth:`pathlib.PurePath.is_absolute` on Posix by skipping path +parsing and normalization. diff --git a/Misc/NEWS.d/next/Library/2023-04-19-16-08-53.gh-issue-84976.HwbzlD.rst b/Misc/NEWS.d/next/Library/2023-04-19-16-08-53.gh-issue-84976.HwbzlD.rst new file mode 100644 index 00000000000000..8658627aeba434 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-19-16-08-53.gh-issue-84976.HwbzlD.rst @@ -0,0 +1,5 @@ +Create a new ``Lib/_pydatetime.py`` file that defines the Python version of +the ``datetime`` module, and make ``datetime`` import the contents of the +new library only if the C implementation is missing. Currently, the full +Python implementation is defined and then deleted if the C implementation is +not available, slowing down ``import datetime`` unnecessarily. 
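To illustrate the gh-100479 hook mentioned above, a rough sketch of a :class:`pathlib.Path` subclass that carries extra state to derivative paths; the ``TaggedPath`` class and its ``tag`` attribute are invented for the example::

    from pathlib import Path

    class TaggedPath(Path):
        tag = None

        def with_segments(self, *segments):
            # Called whenever a derivative path is built (parent, "/", etc.);
            # copy the tag so it travels with the new path object.
            derived = super().with_segments(*segments)
            derived.tag = self.tag
            return derived

    p = TaggedPath("/srv/data/report.txt")
    p.tag = "dataset-1"
    print(p.parent.tag)               # dataset-1
    print((p.parent / "x.txt").tag)   # dataset-1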
diff --git a/Misc/NEWS.d/next/Library/2023-04-22-21-34-13.gh-issue-103693.SBtuLQ.rst b/Misc/NEWS.d/next/Library/2023-04-22-21-34-13.gh-issue-103693.SBtuLQ.rst new file mode 100644 index 00000000000000..52c68bfc9ceea4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-22-21-34-13.gh-issue-103693.SBtuLQ.rst @@ -0,0 +1 @@ +Add convenience variable feature to :mod:`pdb` diff --git a/Misc/NEWS.d/next/Library/2023-04-27-00-45-41.gh-issue-100370.MgZ3KY.rst b/Misc/NEWS.d/next/Library/2023-04-27-00-45-41.gh-issue-100370.MgZ3KY.rst new file mode 100644 index 00000000000000..9022d55c48cb11 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-27-00-45-41.gh-issue-100370.MgZ3KY.rst @@ -0,0 +1,2 @@ +Fix potential :exc:`OverflowError` in :meth:`sqlite3.Connection.blobopen` +for 32-bit builds. Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Library/2023-04-27-18-46-31.gh-issue-68968.E3tnhy.rst b/Misc/NEWS.d/next/Library/2023-04-27-18-46-31.gh-issue-68968.E3tnhy.rst new file mode 100644 index 00000000000000..bf29b64793b933 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-27-18-46-31.gh-issue-68968.E3tnhy.rst @@ -0,0 +1 @@ +Fixed garbled output of :meth:`~unittest.TestCase.assertEqual` when an input lacks final newline. diff --git a/Misc/NEWS.d/next/Library/2023-05-01-16-43-28.gh-issue-104035.MrJBw8.rst b/Misc/NEWS.d/next/Library/2023-05-01-16-43-28.gh-issue-104035.MrJBw8.rst new file mode 100644 index 00000000000000..8c8e3d6ba5fbc1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-01-16-43-28.gh-issue-104035.MrJBw8.rst @@ -0,0 +1,2 @@ +Do not ignore user-defined ``__getstate__`` and ``__setstate__`` methods for +slotted frozen dataclasses. diff --git a/Misc/NEWS.d/next/Library/2023-05-01-17-58-28.gh-issue-103963.XWlHx7.rst b/Misc/NEWS.d/next/Library/2023-05-01-17-58-28.gh-issue-103963.XWlHx7.rst new file mode 100644 index 00000000000000..cb06ad5d22e8a9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-01-17-58-28.gh-issue-103963.XWlHx7.rst @@ -0,0 +1 @@ +Make :mod:`dis` display the names of the args for :opcode:`CALL_INTRINSIC_*`. diff --git a/Misc/NEWS.d/next/Library/2023-05-01-19-10-05.gh-issue-103629.81bpZz.rst b/Misc/NEWS.d/next/Library/2023-05-01-19-10-05.gh-issue-103629.81bpZz.rst new file mode 100644 index 00000000000000..7971ab66359c3d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-01-19-10-05.gh-issue-103629.81bpZz.rst @@ -0,0 +1 @@ +Update the ``repr`` of :class:`typing.Unpack` according to :pep:`692`. diff --git a/Misc/NEWS.d/next/Library/2023-05-02-04-49-45.gh-issue-103822.m0QdAO.rst b/Misc/NEWS.d/next/Library/2023-05-02-04-49-45.gh-issue-103822.m0QdAO.rst new file mode 100644 index 00000000000000..3daf9cc093807b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-02-04-49-45.gh-issue-103822.m0QdAO.rst @@ -0,0 +1 @@ +Update the return type of ``weekday`` to the newly added Day attribute diff --git a/Misc/NEWS.d/next/Library/2023-05-02-20-43-03.gh-issue-104102.vgSdEJ.rst b/Misc/NEWS.d/next/Library/2023-05-02-20-43-03.gh-issue-104102.vgSdEJ.rst new file mode 100644 index 00000000000000..7101de908a5004 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-02-20-43-03.gh-issue-104102.vgSdEJ.rst @@ -0,0 +1,2 @@ +Improve performance of :meth:`pathlib.Path.glob` when evaluating patterns +that contain ``'../'`` segments. 
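A small sketch of the gh-104035 dataclass fix noted above: with ``slots=True`` and ``frozen=True``, user-defined pickle/copy hooks are now kept instead of being replaced by the generated ones::

    import copy
    from dataclasses import dataclass

    calls = []

    @dataclass(frozen=True, slots=True)
    class Point:
        x: int
        y: int

        def __getstate__(self):
            calls.append("getstate")          # record that the custom hook ran
            return (self.x, self.y)

        def __setstate__(self, state):
            x, y = state
            object.__setattr__(self, "x", x)  # bypass the frozen __setattr__
            object.__setattr__(self, "y", y)

    p = copy.copy(Point(1, 2))
    print(p, calls)                           # Point(x=1, y=2) ['getstate']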
diff --git a/Misc/NEWS.d/next/Library/2023-05-02-21-05-30.gh-issue-104104.9tjplT.rst b/Misc/NEWS.d/next/Library/2023-05-02-21-05-30.gh-issue-104104.9tjplT.rst new file mode 100644 index 00000000000000..935a0e2a2bff18 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-02-21-05-30.gh-issue-104104.9tjplT.rst @@ -0,0 +1,2 @@ +Improve performance of :meth:`pathlib.Path.glob` by using +:data:`re.IGNORECASE` to implement case-insensitive matching. diff --git a/Misc/NEWS.d/next/Library/2023-05-03-03-14-33.gh-issue-104114.RG26RD.rst b/Misc/NEWS.d/next/Library/2023-05-03-03-14-33.gh-issue-104114.RG26RD.rst new file mode 100644 index 00000000000000..e705fea8326e7a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-03-03-14-33.gh-issue-104114.RG26RD.rst @@ -0,0 +1,3 @@ +Fix issue where :meth:`pathlib.Path.glob` returns paths using the case of +non-wildcard segments for corresponding path segments, rather than the real +filesystem case. diff --git a/Misc/NEWS.d/next/Library/2023-05-03-16-50-24.gh-issue-104144.yNkjL8.rst b/Misc/NEWS.d/next/Library/2023-05-03-16-50-24.gh-issue-104144.yNkjL8.rst new file mode 100644 index 00000000000000..b975d48ed3385c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-03-16-50-24.gh-issue-104144.yNkjL8.rst @@ -0,0 +1,3 @@ +Optimize :func:`asyncio.gather` when using :func:`asyncio.eager_task_factory` +to complete eagerly if all futures completed eagerly. +Avoid scheduling done callbacks for futures that complete eagerly. diff --git a/Misc/NEWS.d/next/Library/2023-05-03-16-51-53.gh-issue-104144.653Q0P.rst b/Misc/NEWS.d/next/Library/2023-05-03-16-51-53.gh-issue-104144.653Q0P.rst new file mode 100644 index 00000000000000..ced3b7cea04954 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-03-16-51-53.gh-issue-104144.653Q0P.rst @@ -0,0 +1,2 @@ +Optimize :class:`asyncio.TaskGroup` when using :func:`asyncio.eager_task_factory`. +Skip scheduling a done callback if a TaskGroup task completes eagerly. diff --git a/Misc/NEWS.d/next/Library/2023-05-05-18-52-22.gh-issue-65772.w5P5Wv.rst b/Misc/NEWS.d/next/Library/2023-05-05-18-52-22.gh-issue-65772.w5P5Wv.rst new file mode 100644 index 00000000000000..54b0190192863c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-05-18-52-22.gh-issue-65772.w5P5Wv.rst @@ -0,0 +1 @@ +Remove unneeded comments and code in turtle.py. diff --git a/Misc/NEWS.d/next/Library/2023-05-06-20-37-46.gh-issue-102613.QZG9iX.rst b/Misc/NEWS.d/next/Library/2023-05-06-20-37-46.gh-issue-102613.QZG9iX.rst new file mode 100644 index 00000000000000..01f8b948d2cb65 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-06-20-37-46.gh-issue-102613.QZG9iX.rst @@ -0,0 +1,3 @@ +Improve performance of :meth:`pathlib.Path.glob` when expanding recursive +wildcards ("``**``") by merging adjacent wildcards and de-duplicating +results only when necessary. diff --git a/Misc/NEWS.d/next/Library/2023-05-07-19-56-45.gh-issue-104265.fVblry.rst b/Misc/NEWS.d/next/Library/2023-05-07-19-56-45.gh-issue-104265.fVblry.rst new file mode 100644 index 00000000000000..9c582844bf909b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-07-19-56-45.gh-issue-104265.fVblry.rst @@ -0,0 +1,4 @@ +Prevent possible crash by disallowing instantiation of the +:class:`!_csv.Reader` and :class:`!_csv.Writer` types. +The regression was introduced in 3.10.0a4 with PR 23224 (:issue:`14935`). +Patch by Radislav Chugunov.
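Related to the gh-104265 entry above: reader and writer objects are meant to come from the :func:`csv.reader` and :func:`csv.writer` factory functions rather than from the private ``_csv`` types, for example::

    import csv, io

    buf = io.StringIO("name,year\r\nBlade Runner,1982\r\n")
    for row in csv.reader(buf):
        print(row)                   # ['name', 'year'] then ['Blade Runner', '1982']

    out = io.StringIO()
    csv.writer(out).writerow(["name", "year"])
    print(repr(out.getvalue()))      # 'name,year\r\n'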
diff --git a/Misc/NEWS.d/next/Library/2023-05-08-15-50-59.gh-issue-104310.fXVSPY.rst b/Misc/NEWS.d/next/Library/2023-05-08-15-50-59.gh-issue-104310.fXVSPY.rst new file mode 100644 index 00000000000000..3743d569995f2e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-08-15-50-59.gh-issue-104310.fXVSPY.rst @@ -0,0 +1,3 @@ +Users may now use ``importlib.util.allowing_all_extensions()`` (a context +manager) to temporarily disable the strict compatibility checks for +importing extension modules in subinterpreters. diff --git a/Misc/NEWS.d/next/Library/2023-05-08-20-57-17.gh-issue-104307.DSB93G.rst b/Misc/NEWS.d/next/Library/2023-05-08-20-57-17.gh-issue-104307.DSB93G.rst new file mode 100644 index 00000000000000..03775845450caa --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-08-20-57-17.gh-issue-104307.DSB93G.rst @@ -0,0 +1 @@ +:func:`socket.getnameinfo` now releases the GIL while contacting the DNS server. diff --git a/Misc/NEWS.d/next/Library/2023-05-08-23-01-59.gh-issue-104139.83Tnt-.rst b/Misc/NEWS.d/next/Library/2023-05-08-23-01-59.gh-issue-104139.83Tnt-.rst new file mode 100644 index 00000000000000..145e75f6dea6f4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-08-23-01-59.gh-issue-104139.83Tnt-.rst @@ -0,0 +1,3 @@ +Teach :func:`urllib.parse.urlunsplit` to retain the ``"//"`` when assembling ``itms-services://?action=generate-bugs`` style `Apple Platform Deployment `_ URLs. diff --git a/Misc/NEWS.d/next/Security/2023-04-17-14-38-12.gh-issue-99108.720lG8.rst b/Misc/NEWS.d/next/Security/2023-04-17-14-38-12.gh-issue-99108.720lG8.rst new file mode 100644 index 00000000000000..f259acf753831c --- /dev/null +++ b/Misc/NEWS.d/next/Security/2023-04-17-14-38-12.gh-issue-99108.720lG8.rst @@ -0,0 +1,2 @@ +Upgrade built-in :mod:`hashlib` SHA3 implementation to a verified implementation +from the ``HACL*`` project. Used when OpenSSL is not present or lacks SHA3. diff --git a/Misc/NEWS.d/next/Security/2023-05-01-15-03-25.gh-issue-104049.b01Y3g.rst b/Misc/NEWS.d/next/Security/2023-05-01-15-03-25.gh-issue-104049.b01Y3g.rst new file mode 100644 index 00000000000000..969deb26bfeb95 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2023-05-01-15-03-25.gh-issue-104049.b01Y3g.rst @@ -0,0 +1,2 @@ +Do not expose the local on-disk location in directory indexes +produced by :class:`http.server.SimpleHTTPRequestHandler`. diff --git a/Misc/NEWS.d/next/Security/2023-05-02-17-56-32.gh-issue-99889.l664SU.rst b/Misc/NEWS.d/next/Security/2023-05-02-17-56-32.gh-issue-99889.l664SU.rst new file mode 100644 index 00000000000000..b7002e81b6b677 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2023-05-02-17-56-32.gh-issue-99889.l664SU.rst @@ -0,0 +1,2 @@ +Fixed a security flaw in :func:`uu.decode` that could allow for +directory traversal based on the input if no ``out_file`` was specified. diff --git a/Misc/NEWS.d/next/Tests/2023-03-17-22-00-47.gh-issue-102795.z21EoC.rst b/Misc/NEWS.d/next/Tests/2023-03-17-22-00-47.gh-issue-102795.z21EoC.rst new file mode 100644 index 00000000000000..fe2afff91ece7a --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-03-17-22-00-47.gh-issue-102795.z21EoC.rst @@ -0,0 +1 @@ +Fix use of poll in test_epoll's test_control_and_wait. diff --git a/Misc/NEWS.d/next/Windows/2023-03-24-11-25-28.gh-issue-102997.dredy2.rst b/Misc/NEWS.d/next/Windows/2023-03-24-11-25-28.gh-issue-102997.dredy2.rst new file mode 100644 index 00000000000000..c8f7259aecba6f --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-03-24-11-25-28.gh-issue-102997.dredy2.rst @@ -0,0 +1 @@ +Update Windows installer to use SQLite 3.41.2.
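The ``itms-services`` change above (gh-104139, together with the ``uses_netloc`` addition in ``Lib/urllib/parse.py``) can be seen in a simple split/unsplit round trip::

    from urllib.parse import urlsplit, urlunsplit

    url = "itms-services://?action=generate-bugs"
    parts = urlsplit(url)
    print(repr(parts.netloc))   # '' -- empty network location
    print(urlunsplit(parts))    # itms-services://?action=generate-bugs ('//' retained)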
diff --git a/Misc/NEWS.d/next/macOS/2023-03-24-11-20-47.gh-issue-102997.ZgQkbq.rst b/Misc/NEWS.d/next/macOS/2023-03-24-11-20-47.gh-issue-102997.ZgQkbq.rst new file mode 100644 index 00000000000000..d0b390a896b743 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-03-24-11-20-47.gh-issue-102997.ZgQkbq.rst @@ -0,0 +1 @@ +Update macOS installer to SQLite 3.41.2. diff --git a/Misc/NEWS.d/next/macOS/2023-05-04-21-47-59.gh-issue-104180.lEJCwd.rst b/Misc/NEWS.d/next/macOS/2023-05-04-21-47-59.gh-issue-104180.lEJCwd.rst new file mode 100644 index 00000000000000..b6b18dcfd81394 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-05-04-21-47-59.gh-issue-104180.lEJCwd.rst @@ -0,0 +1,2 @@ +Support reading SOCKS proxy configuration from macOS System Configuration. +Patch by Sam Schott. diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml index 23baeeeae79193..48299e9b35ff97 100644 --- a/Misc/stable_abi.toml +++ b/Misc/stable_abi.toml @@ -2397,3 +2397,12 @@ added = '3.12' # Before 3.12, available in "structmember.h" w/o Py_ prefix [const.Py_AUDIT_READ] added = '3.12' # Before 3.12, available in "structmember.h" + +[function.PyObject_GetTypeData] + added = '3.12' +[function.PyType_GetTypeDataSize] + added = '3.12' +[const.Py_RELATIVE_OFFSET] + added = '3.12' +[const.Py_TPFLAGS_ITEMS_AT_END] + added = '3.12' diff --git a/Modules/Setup b/Modules/Setup index 1c6f2f7ea5182d..312e99fea530dc 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -166,7 +166,7 @@ PYTHONPATH=$(COREPYTHONPATH) #_md5 md5module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_MD5.c -D_BSD_SOURCE -D_DEFAULT_SOURCE #_sha1 sha1module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_SHA1.c -D_BSD_SOURCE -D_DEFAULT_SOURCE #_sha2 sha2module.c -I$(srcdir)/Modules/_hacl/include Modules/_hacl/libHacl_Streaming_SHA2.a -#_sha3 _sha3/sha3module.c +#_sha3 sha3module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_SHA3.c -D_BSD_SOURCE -D_DEFAULT_SOURCE # text encodings and unicode #_codecs_cn cjkcodecs/_codecs_cn.c diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index fe1b9f8f5380c1..8e66576b5c5f00 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -80,7 +80,7 @@ @MODULE__MD5_TRUE@_md5 md5module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_MD5.c -D_BSD_SOURCE -D_DEFAULT_SOURCE @MODULE__SHA1_TRUE@_sha1 sha1module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_SHA1.c -D_BSD_SOURCE -D_DEFAULT_SOURCE @MODULE__SHA2_TRUE@_sha2 sha2module.c -I$(srcdir)/Modules/_hacl/include Modules/_hacl/libHacl_Streaming_SHA2.a -@MODULE__SHA3_TRUE@_sha3 _sha3/sha3module.c +@MODULE__SHA3_TRUE@_sha3 sha3module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_SHA3.c -D_BSD_SOURCE -D_DEFAULT_SOURCE @MODULE__BLAKE2_TRUE@_blake2 _blake2/blake2module.c _blake2/blake2b_impl.c _blake2/blake2s_impl.c ############################################################################ @@ -169,7 +169,7 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/pytime.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/pyos.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c 
_testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/pytime.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyos.c _testcapi/immortal.c _testcapi/heaptype_relative.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c # Some testing modules MUST be built as shared libraries. diff --git a/Modules/_abc.c b/Modules/_abc.c index 9d6654b4e58aad..d3e405dadb664a 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -7,6 +7,7 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_object.h" // _PyType_GetSubclasses() #include "pycore_runtime.h" // _Py_ID() +#include "pycore_typeobject.h" // _PyType_GetMRO() #include "clinic/_abc.c.h" /*[clinic input] @@ -452,7 +453,8 @@ _abc__abc_init(PyObject *module, PyObject *self) * their special status w.r.t. pattern matching. */ if (PyType_Check(self)) { PyTypeObject *cls = (PyTypeObject *)self; - PyObject *flags = PyDict_GetItemWithError(cls->tp_dict, + PyObject *dict = _PyType_GetDict(cls); + PyObject *flags = PyDict_GetItemWithError(dict, &_Py_ID(__abc_tpflags__)); if (flags == NULL) { if (PyErr_Occurred()) { @@ -471,7 +473,7 @@ _abc__abc_init(PyObject *module, PyObject *self) } ((PyTypeObject *)self)->tp_flags |= (val & COLLECTION_FLAGS); } - if (PyDict_DelItem(cls->tp_dict, &_Py_ID(__abc_tpflags__)) < 0) { + if (PyDict_DelItem(dict, &_Py_ID(__abc_tpflags__)) < 0) { return NULL; } } @@ -742,7 +744,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, Py_DECREF(ok); /* 4. Check if it's a direct subclass. */ - PyObject *mro = ((PyTypeObject *)subclass)->tp_mro; + PyObject *mro = _PyType_GetMRO((PyTypeObject *)subclass); assert(PyTuple_Check(mro)); for (pos = 0; pos < PyTuple_GET_SIZE(mro); pos++) { PyObject *mro_item = PyTuple_GET_ITEM(mro, pos); @@ -942,6 +944,7 @@ _abcmodule_free(void *module) static PyModuleDef_Slot _abcmodule_slots[] = { {Py_mod_exec, _abcmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 82dbc087322aa9..3830245abe87b3 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -17,6 +17,10 @@ module _asyncio /*[clinic end generated code: output=da39a3ee5e6b4b0d input=8fd17862aa989c69]*/ +#define FI_FREELIST_MAXLEN 255 + +typedef struct futureiterobject futureiterobject; + /* State of the _asyncio module */ typedef struct { PyTypeObject *FutureIterType; @@ -31,8 +35,11 @@ typedef struct { all running event loops. {EventLoop: Task} */ PyObject *current_tasks; - /* WeakSet containing all alive tasks. */ - PyObject *all_tasks; + /* WeakSet containing all tasks scheduled to run on event loops. */ + PyObject *scheduled_tasks; + + /* Set containing all eagerly executing tasks. */ + PyObject *eager_tasks; /* An isinstance type cache for the 'is_coroutine()' function. 
*/ PyObject *iscoroutine_typecache; @@ -63,6 +70,9 @@ typedef struct { /* Counter for autogenerated Task names */ uint64_t task_name_counter; + + futureiterobject *fi_freelist; + Py_ssize_t fi_freelist_len; } asyncio_state; static inline asyncio_state * @@ -156,6 +166,9 @@ class _asyncio.Future "FutureObj *" "&Future_Type" /* Get FutureIter from Future */ static PyObject * future_new_iter(PyObject *); +static PyObject * +task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *result); + static int _is_coroutine(asyncio_state *state, PyObject *coro) @@ -1567,28 +1580,24 @@ FutureObj_dealloc(PyObject *self) /*********************** Future Iterator **************************/ -typedef struct { +typedef struct futureiterobject { PyObject_HEAD FutureObj *future; } futureiterobject; -#define FI_FREELIST_MAXLEN 255 -static futureiterobject *fi_freelist = NULL; -static Py_ssize_t fi_freelist_len = 0; - - static void FutureIter_dealloc(futureiterobject *it) { PyTypeObject *tp = Py_TYPE(it); + asyncio_state *state = get_asyncio_state_by_def((PyObject *)it); PyObject_GC_UnTrack(it); tp->tp_clear((PyObject *)it); - if (fi_freelist_len < FI_FREELIST_MAXLEN) { - fi_freelist_len++; - it->future = (FutureObj*) fi_freelist; - fi_freelist = it; + if (state->fi_freelist_len < FI_FREELIST_MAXLEN) { + state->fi_freelist_len++; + it->future = (FutureObj*) state->fi_freelist; + state->fi_freelist = it; } else { PyObject_GC_Del(it); @@ -1792,17 +1801,12 @@ future_new_iter(PyObject *fut) futureiterobject *it; asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); - if (!Future_Check(state, fut)) { - PyErr_BadInternalCall(); - return NULL; - } - ENSURE_FUTURE_ALIVE(state, fut) - if (fi_freelist_len) { - fi_freelist_len--; - it = fi_freelist; - fi_freelist = (futureiterobject*) it->future; + if (state->fi_freelist_len) { + state->fi_freelist_len--; + it = state->fi_freelist; + state->fi_freelist = (futureiterobject*) it->future; it->future = NULL; _Py_NewReference((PyObject*) it); } @@ -1830,6 +1834,7 @@ class _asyncio.Task "TaskObj *" "&Task_Type" static int task_call_step_soon(asyncio_state *state, TaskObj *, PyObject *); static PyObject * task_wakeup(TaskObj *, PyObject *); static PyObject * task_step(asyncio_state *, TaskObj *, PyObject *); +static int task_eager_start(asyncio_state *state, TaskObj *task); /* ----- Task._step wrapper */ @@ -1940,7 +1945,7 @@ static PyMethodDef TaskWakeupDef = { static int register_task(asyncio_state *state, PyObject *task) { - PyObject *res = PyObject_CallMethodOneArg(state->all_tasks, + PyObject *res = PyObject_CallMethodOneArg(state->scheduled_tasks, &_Py_ID(add), task); if (res == NULL) { return -1; @@ -1949,11 +1954,16 @@ register_task(asyncio_state *state, PyObject *task) return 0; } +static int +register_eager_task(asyncio_state *state, PyObject *task) +{ + return PySet_Add(state->eager_tasks, task); +} static int unregister_task(asyncio_state *state, PyObject *task) { - PyObject *res = PyObject_CallMethodOneArg(state->all_tasks, + PyObject *res = PyObject_CallMethodOneArg(state->scheduled_tasks, &_Py_ID(discard), task); if (res == NULL) { return -1; @@ -1962,6 +1972,11 @@ unregister_task(asyncio_state *state, PyObject *task) return 0; } +static int +unregister_eager_task(asyncio_state *state, PyObject *task) +{ + return PySet_Discard(state->eager_tasks, task); +} static int enter_task(asyncio_state *state, PyObject *loop, PyObject *task) @@ -2015,6 +2030,39 @@ leave_task(asyncio_state *state, PyObject *loop, PyObject *task) return 
_PyDict_DelItem_KnownHash(state->current_tasks, loop, hash); } +static PyObject * +swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) +{ + PyObject *prev_task; + Py_hash_t hash; + hash = PyObject_Hash(loop); + if (hash == -1) { + return NULL; + } + + prev_task = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash); + if (prev_task == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + prev_task = Py_None; + } + + if (task == Py_None) { + if (_PyDict_DelItem_KnownHash(state->current_tasks, loop, hash) == -1) { + return NULL; + } + } else { + if (_PyDict_SetItem_KnownHash(state->current_tasks, loop, task, hash) == -1) { + return NULL; + } + } + + Py_INCREF(prev_task); + + return prev_task; +} + /* ----- Task */ /*[clinic input] @@ -2025,15 +2073,16 @@ _asyncio.Task.__init__ loop: object = None name: object = None context: object = None + eager_start: bool = False A coroutine wrapped in a Future. [clinic start generated code]*/ static int _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, - PyObject *name, PyObject *context) -/*[clinic end generated code: output=49ac96fe33d0e5c7 input=924522490c8ce825]*/ - + PyObject *name, PyObject *context, + int eager_start) +/*[clinic end generated code: output=7aced2d27836f1a1 input=18e3f113a51b829d]*/ { if (future_init((FutureObj*)self, loop)) { return -1; @@ -2083,6 +2132,21 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, return -1; } + if (eager_start) { + PyObject *res = PyObject_CallMethodNoArgs(loop, &_Py_ID(is_running)); + if (res == NULL) { + return -1; + } + int is_loop_running = Py_IsTrue(res); + Py_DECREF(res); + if (is_loop_running) { + if (task_eager_start(state, self)) { + return -1; + } + return 0; + } + } + if (task_call_step_soon(state, self, NULL)) { return -1; } @@ -2831,6 +2895,20 @@ task_step_impl(asyncio_state *state, TaskObj *task, PyObject *exc) Py_RETURN_NONE; } + PyObject *ret = task_step_handle_result_impl(state, task, result); + return ret; + +fail: + return NULL; +} + + +static PyObject * +task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *result) +{ + int res; + PyObject *o; + if (result == (PyObject*)task) { /* We have a task that wants to await on itself */ goto self_await; @@ -3062,6 +3140,65 @@ task_step(asyncio_state *state, TaskObj *task, PyObject *exc) } } +static int +task_eager_start(asyncio_state *state, TaskObj *task) +{ + assert(task != NULL); + PyObject *prevtask = swap_current_task(state, task->task_loop, (PyObject *)task); + if (prevtask == NULL) { + return -1; + } + + if (register_eager_task(state, (PyObject *)task) == -1) { + Py_DECREF(prevtask); + return -1; + } + + if (PyContext_Enter(task->task_context) == -1) { + Py_DECREF(prevtask); + return -1; + } + + int retval = 0; + + PyObject *stepres = task_step_impl(state, task, NULL); + if (stepres == NULL) { + PyObject *exc = PyErr_GetRaisedException(); + _PyErr_ChainExceptions1(exc); + retval = -1; + } else { + Py_DECREF(stepres); + } + + PyObject *curtask = swap_current_task(state, task->task_loop, prevtask); + Py_DECREF(prevtask); + if (curtask == NULL) { + retval = -1; + } else { + assert(curtask == (PyObject *)task); + Py_DECREF(curtask); + } + + if (unregister_eager_task(state, (PyObject *)task) == -1) { + retval = -1; + } + + if (PyContext_Exit(task->task_context) == -1) { + retval = -1; + } + + if (task->task_state == STATE_PENDING) { + if (register_task(state, (PyObject *)task) == -1) { + retval = -1; + } + } else { + // This seems to really 
help performance on pyperformance benchmarks + Py_CLEAR(task->task_coro); + } + + return retval; +} + static PyObject * task_wakeup(TaskObj *task, PyObject *o) { @@ -3225,6 +3362,27 @@ _asyncio__register_task_impl(PyObject *module, PyObject *task) Py_RETURN_NONE; } +/*[clinic input] +_asyncio._register_eager_task + + task: object + +Register a new task in asyncio as executed by loop. + +Returns None. +[clinic start generated code]*/ + +static PyObject * +_asyncio__register_eager_task_impl(PyObject *module, PyObject *task) +/*[clinic end generated code: output=dfe1d45367c73f1a input=237f684683398c51]*/ +{ + asyncio_state *state = get_asyncio_state(module); + if (register_eager_task(state, task) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + /*[clinic input] _asyncio._unregister_task @@ -3247,6 +3405,27 @@ _asyncio__unregister_task_impl(PyObject *module, PyObject *task) Py_RETURN_NONE; } +/*[clinic input] +_asyncio._unregister_eager_task + + task: object + +Unregister a task. + +Returns None. +[clinic start generated code]*/ + +static PyObject * +_asyncio__unregister_eager_task_impl(PyObject *module, PyObject *task) +/*[clinic end generated code: output=a426922bd07f23d1 input=9d07401ef14ee048]*/ +{ + asyncio_state *state = get_asyncio_state(module); + if (unregister_eager_task(state, task) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + /*[clinic input] _asyncio._enter_task @@ -3298,6 +3477,27 @@ _asyncio__leave_task_impl(PyObject *module, PyObject *loop, PyObject *task) } +/*[clinic input] +_asyncio._swap_current_task + + loop: object + task: object + +Temporarily swap in the supplied task and return the original one (or None). + +This is intended for use during eager coroutine execution. + +[clinic start generated code]*/ + +static PyObject * +_asyncio__swap_current_task_impl(PyObject *module, PyObject *loop, + PyObject *task) +/*[clinic end generated code: output=9f88de958df74c7e input=c9c72208d3d38b6c]*/ +{ + return swap_current_task(get_asyncio_state(module), loop, task); +} + + /*[clinic input] _asyncio.current_task @@ -3340,22 +3540,22 @@ _asyncio_current_task_impl(PyObject *module, PyObject *loop) static void -module_free_freelists(void) +module_free_freelists(asyncio_state *state) { PyObject *next; PyObject *current; - next = (PyObject*) fi_freelist; + next = (PyObject*) state->fi_freelist; while (next != NULL) { - assert(fi_freelist_len > 0); - fi_freelist_len--; + assert(state->fi_freelist_len > 0); + state->fi_freelist_len--; current = next; next = (PyObject*) ((futureiterobject*) current)->future; PyObject_GC_Del(current); } - assert(fi_freelist_len == 0); - fi_freelist = NULL; + assert(state->fi_freelist_len == 0); + state->fi_freelist = NULL; } static int @@ -3379,14 +3579,15 @@ module_traverse(PyObject *mod, visitproc visit, void *arg) Py_VISIT(state->asyncio_InvalidStateError); Py_VISIT(state->asyncio_CancelledError); - Py_VISIT(state->all_tasks); + Py_VISIT(state->scheduled_tasks); + Py_VISIT(state->eager_tasks); Py_VISIT(state->current_tasks); Py_VISIT(state->iscoroutine_typecache); Py_VISIT(state->context_kwname); // Visit freelist. 
- PyObject *next = (PyObject*) fi_freelist; + PyObject *next = (PyObject*) state->fi_freelist; while (next != NULL) { PyObject *current = next; Py_VISIT(current); @@ -3416,13 +3617,14 @@ module_clear(PyObject *mod) Py_CLEAR(state->asyncio_InvalidStateError); Py_CLEAR(state->asyncio_CancelledError); - Py_CLEAR(state->all_tasks); + Py_CLEAR(state->scheduled_tasks); + Py_CLEAR(state->eager_tasks); Py_CLEAR(state->current_tasks); Py_CLEAR(state->iscoroutine_typecache); Py_CLEAR(state->context_kwname); - module_free_freelists(); + module_free_freelists(state); return 0; } @@ -3496,9 +3698,14 @@ module_init(asyncio_state *state) PyObject *weak_set; WITH_MOD("weakref") GET_MOD_ATTR(weak_set, "WeakSet"); - state->all_tasks = PyObject_CallNoArgs(weak_set); + state->scheduled_tasks = PyObject_CallNoArgs(weak_set); Py_CLEAR(weak_set); - if (state->all_tasks == NULL) { + if (state->scheduled_tasks == NULL) { + goto fail; + } + + state->eager_tasks = PySet_New(NULL); + if (state->eager_tasks == NULL) { goto fail; } @@ -3522,9 +3729,12 @@ static PyMethodDef asyncio_methods[] = { _ASYNCIO__GET_RUNNING_LOOP_METHODDEF _ASYNCIO__SET_RUNNING_LOOP_METHODDEF _ASYNCIO__REGISTER_TASK_METHODDEF + _ASYNCIO__REGISTER_EAGER_TASK_METHODDEF _ASYNCIO__UNREGISTER_TASK_METHODDEF + _ASYNCIO__UNREGISTER_EAGER_TASK_METHODDEF _ASYNCIO__ENTER_TASK_METHODDEF _ASYNCIO__LEAVE_TASK_METHODDEF + _ASYNCIO__SWAP_CURRENT_TASK_METHODDEF {NULL, NULL} }; @@ -3561,7 +3771,11 @@ module_exec(PyObject *mod) return -1; } - if (PyModule_AddObjectRef(mod, "_all_tasks", state->all_tasks) < 0) { + if (PyModule_AddObjectRef(mod, "_scheduled_tasks", state->scheduled_tasks) < 0) { + return -1; + } + + if (PyModule_AddObjectRef(mod, "_eager_tasks", state->eager_tasks) < 0) { return -1; } @@ -3575,6 +3789,7 @@ module_exec(PyObject *mod) static struct PyModuleDef_Slot module_slots[] = { {Py_mod_exec, module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/_bisectmodule.c b/Modules/_bisectmodule.c index 30801c2f87eee7..0773bbd191931d 100644 --- a/Modules/_bisectmodule.c +++ b/Modules/_bisectmodule.c @@ -457,6 +457,7 @@ bisect_modexec(PyObject *m) static PyModuleDef_Slot bisect_slots[] = { {Py_mod_exec, bisect_modexec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_blake2/blake2module.c b/Modules/_blake2/blake2module.c index 44d783b40d0453..0d1d88c6603684 100644 --- a/Modules/_blake2/blake2module.c +++ b/Modules/_blake2/blake2module.c @@ -127,6 +127,7 @@ blake2_exec(PyObject *m) static PyModuleDef_Slot _blake2_slots[] = { {Py_mod_exec, blake2_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; @@ -146,4 +147,4 @@ PyMODINIT_FUNC PyInit__blake2(void) { return PyModuleDef_Init(&blake2_module); -} \ No newline at end of file +} diff --git a/Modules/_blake2/blake2module.h b/Modules/_blake2/blake2module.h index aa8f281178eadc..c8144ec9d48d29 100644 --- a/Modules/_blake2/blake2module.h +++ b/Modules/_blake2/blake2module.h @@ -38,6 +38,6 @@ #endif // HAVE_LIBB2 // for secure_zero_memory(), store32(), store48(), and store64() -#include "impl/blake2-impl.h" +#include "impl/blake2-impl.h" #endif // Py_BLAKE2MODULE_H diff --git a/Modules/_blake2/impl/blake2b-round.h b/Modules/_blake2/impl/blake2b-round.h index cebc22550da4cd..5b452c4d63babe 100644 --- a/Modules/_blake2/impl/blake2b-round.h +++ b/Modules/_blake2/impl/blake2b-round.h @@ -62,7 +62,7 @@ \ row2l = _mm_roti_epi64(row2l, -24); \ row2h = 
_mm_roti_epi64(row2h, -24); \ - + #define G2(row1l,row2l,row3l,row4l,row1h,row2h,row3h,row4h,b0,b1) \ row1l = _mm_add_epi64(_mm_add_epi64(row1l, b0), row2l); \ row1h = _mm_add_epi64(_mm_add_epi64(row1h, b1), row2h); \ @@ -81,7 +81,7 @@ \ row2l = _mm_roti_epi64(row2l, -63); \ row2h = _mm_roti_epi64(row2h, -63); \ - + #if defined(HAVE_SSSE3) #define DIAGONALIZE(row1l,row2l,row3l,row4l,row1h,row2h,row3h,row4h) \ t0 = _mm_alignr_epi8(row2h, row2l, 8); \ diff --git a/Modules/_blake2/impl/blake2s-load-xop.h b/Modules/_blake2/impl/blake2s-load-xop.h index ac591a77d191a7..14d9e7f7640672 100644 --- a/Modules/_blake2/impl/blake2s-load-xop.h +++ b/Modules/_blake2/impl/blake2s-load-xop.h @@ -166,7 +166,7 @@ buf = _mm_perm_epi8(t1, m3, _mm_set_epi32(TOB(3),TOB(2),TOB(1),TOB(7)) ); #define LOAD_MSG_8_3(buf) \ t0 = _mm_perm_epi8(m0, m2, _mm_set_epi32(TOB(6),TOB(1),TOB(0),TOB(0)) ); \ buf = _mm_perm_epi8(t0, m3, _mm_set_epi32(TOB(3),TOB(2),TOB(5),TOB(4)) ); \ - + #define LOAD_MSG_8_4(buf) \ buf = _mm_perm_epi8(m0, m1, _mm_set_epi32(TOB(5),TOB(4),TOB(7),TOB(2)) ); diff --git a/Modules/_blake2/impl/blake2s-round.h b/Modules/_blake2/impl/blake2s-round.h index 1e2f2b7f59bd6c..3af4be35bee5d4 100644 --- a/Modules/_blake2/impl/blake2s-round.h +++ b/Modules/_blake2/impl/blake2s-round.h @@ -86,6 +86,6 @@ LOAD_MSG_ ##r ##_4(buf4); \ G2(row1,row2,row3,row4,buf4); \ UNDIAGONALIZE(row1,row2,row3,row4); \ - + #endif diff --git a/Modules/_bz2module.c b/Modules/_bz2module.c index 8e7b8e8078af4e..97bd44b4ac9694 100644 --- a/Modules/_bz2module.c +++ b/Modules/_bz2module.c @@ -799,6 +799,7 @@ _bz2_free(void *module) static struct PyModuleDef_Slot _bz2_slots[] = { {Py_mod_exec, _bz2_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_codecsmodule.c b/Modules/_codecsmodule.c index d5035d20600ae2..777c753bd7c2a9 100644 --- a/Modules/_codecsmodule.c +++ b/Modules/_codecsmodule.c @@ -1049,6 +1049,7 @@ static PyMethodDef _codecs_functions[] = { }; static PyModuleDef_Slot _codecs_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index a9b1425177c3d7..9a81531bdffb16 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -2571,6 +2571,7 @@ collections_exec(PyObject *module) { static struct PyModuleDef_Slot collections_slots[] = { {Py_mod_exec, collections_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_contextvarsmodule.c b/Modules/_contextvarsmodule.c index d13b5962c13c44..f621c1de6d42d6 100644 --- a/Modules/_contextvarsmodule.c +++ b/Modules/_contextvarsmodule.c @@ -44,6 +44,7 @@ _contextvars_exec(PyObject *m) static struct PyModuleDef_Slot _contextvars_slots[] = { {Py_mod_exec, _contextvars_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_cryptmodule.c b/Modules/_cryptmodule.c index 72a4f44600d92c..75035084c9cd29 100644 --- a/Modules/_cryptmodule.c +++ b/Modules/_cryptmodule.c @@ -58,6 +58,7 @@ static PyMethodDef crypt_methods[] = { }; static PyModuleDef_Slot _crypt_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_csv.c b/Modules/_csv.c index 2217cc2ca7a775..9ab2ad266c2739 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -1000,7 +1000,7 @@ PyType_Spec Reader_Type_spec = { .name = "_csv.reader", .basicsize = sizeof(ReaderObj), 
.flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | - Py_TPFLAGS_IMMUTABLETYPE), + Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION), .slots = Reader_Type_slots }; @@ -1431,7 +1431,7 @@ PyType_Spec Writer_Type_spec = { .name = "_csv.writer", .basicsize = sizeof(WriterObj), .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | - Py_TPFLAGS_IMMUTABLETYPE), + Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION), .slots = Writer_Type_slots, }; @@ -1798,6 +1798,7 @@ csv_exec(PyObject *module) { static PyModuleDef_Slot csv_slots[] = { {Py_mod_exec, csv_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index c7ed6bd2229c79..f6cda45eaeac27 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -139,10 +139,6 @@ static PyTypeObject Simple_Type; strong reference to _ctypes._unpickle() function */ static PyObject *_unpickle; -#ifdef MS_WIN32 -PyObject *ComError; // Borrowed reference to: &PyComError_Type -#endif - /****************************************************************/ @@ -5480,46 +5476,38 @@ comerror_init(PyObject *self, PyObject *args, PyObject *kwds) return 0; } -static PyTypeObject PyComError_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.COMError", /* tp_name */ - sizeof(PyBaseExceptionObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR(comerror_doc), /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)comerror_init, /* tp_init */ - 0, /* tp_alloc */ - 0, /* tp_new */ +static int +comerror_traverse(PyObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; +} + +static void +comerror_dealloc(PyObject *self) +{ + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack(self); + tp->tp_free(self); + Py_DECREF(tp); +} + +static PyType_Slot comerror_slots[] = { + {Py_tp_doc, (void *)PyDoc_STR(comerror_doc)}, + {Py_tp_init, comerror_init}, + {Py_tp_traverse, comerror_traverse}, + {Py_tp_dealloc, comerror_dealloc}, + {0, NULL}, }; + +static PyType_Spec comerror_spec = { + .name = "_ctypes.COMError", + .basicsize = sizeof(PyBaseExceptionObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE), + .slots = comerror_slots, +}; + #endif // MS_WIN32 static PyObject * @@ -5661,8 +5649,9 @@ _ctypes_add_types(PyObject *mod) } \ } while (0) -#define CREATE_TYPE(MOD, TP, SPEC) do { \ - PyObject *type = PyType_FromMetaclass(NULL, MOD, SPEC, NULL); \ +#define CREATE_TYPE(MOD, TP, SPEC, BASE) do { \ + PyObject *type = PyType_FromMetaclass(NULL, MOD, SPEC, \ + (PyObject *)BASE); \ if (type == NULL) { \ return -1; \ } \ @@ -5675,8 +5664,8 @@ _ctypes_add_types(PyObject *mod) ob_type is the metatype (the 'type'), defaults to PyType_Type, tp_base is 
the base type, defaults to 'object' aka PyBaseObject_Type. */ - CREATE_TYPE(mod, st->PyCArg_Type, &carg_spec); - CREATE_TYPE(mod, st->PyCThunk_Type, &cthunk_spec); + CREATE_TYPE(mod, st->PyCArg_Type, &carg_spec, NULL); + CREATE_TYPE(mod, st->PyCThunk_Type, &cthunk_spec, NULL); TYPE_READY(&PyCData_Type); /* StgDict is derived from PyDict_Type */ TYPE_READY_BASE(&PyCStgDict_Type, &PyDict_Type); @@ -5709,18 +5698,18 @@ _ctypes_add_types(PyObject *mod) * Simple classes */ - CREATE_TYPE(mod, st->PyCField_Type, &cfield_spec); + CREATE_TYPE(mod, st->PyCField_Type, &cfield_spec, NULL); /************************************************* * * Other stuff */ - CREATE_TYPE(mod, st->DictRemover_Type, &dictremover_spec); - CREATE_TYPE(mod, st->StructParam_Type, &structparam_spec); + CREATE_TYPE(mod, st->DictRemover_Type, &dictremover_spec, NULL); + CREATE_TYPE(mod, st->StructParam_Type, &structparam_spec, NULL); #ifdef MS_WIN32 - TYPE_READY_BASE(&PyComError_Type, (PyTypeObject*)PyExc_Exception); + CREATE_TYPE(mod, st->PyComError_Type, &comerror_spec, PyExc_Exception); #endif #undef TYPE_READY @@ -5750,7 +5739,8 @@ _ctypes_add_objects(PyObject *mod) MOD_ADD("_pointer_type_cache", Py_NewRef(_ctypes_ptrtype_cache)); #ifdef MS_WIN32 - MOD_ADD("COMError", Py_NewRef(ComError)); + ctypes_state *st = GLOBAL_STATE(); + MOD_ADD("COMError", Py_NewRef(st->PyComError_Type)); MOD_ADD("FUNCFLAG_HRESULT", PyLong_FromLong(FUNCFLAG_HRESULT)); MOD_ADD("FUNCFLAG_STDCALL", PyLong_FromLong(FUNCFLAG_STDCALL)); #endif @@ -5807,9 +5797,6 @@ _ctypes_mod_exec(PyObject *mod) if (_ctypes_add_types(mod) < 0) { return -1; } -#ifdef MS_WIN32 - ComError = (PyObject*)&PyComError_Type; -#endif if (_ctypes_add_objects(mod) < 0) { return -1; diff --git a/Modules/_ctypes/_ctypes_test.c b/Modules/_ctypes/_ctypes_test.c index a8811d03cc91a2..ddfb2c8a332a9e 100644 --- a/Modules/_ctypes/_ctypes_test.c +++ b/Modules/_ctypes/_ctypes_test.c @@ -1036,7 +1036,7 @@ EXPORT (HRESULT) KeepObject(IUnknown *punk) #ifdef MS_WIN32 -// i38748: c stub for testing stack corruption +// i38748: c stub for testing stack corruption // When executing a Python callback with a long and a long long typedef long(__stdcall *_test_i38748_funcType)(long, long long); @@ -1054,6 +1054,7 @@ _testfunc_pylist_append(PyObject *list, PyObject *item) } static struct PyModuleDef_Slot _ctypes_test_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 93bc784df5386f..d2fe525dd4d396 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1115,7 +1115,8 @@ GetComError(HRESULT errcode, GUID *riid, IUnknown *pIunk) descr, source, helpfile, helpcontext, progid); if (obj) { - PyErr_SetObject(ComError, obj); + ctypes_state *st = GLOBAL_STATE(); + PyErr_SetObject((PyObject *)st->PyComError_Type, obj); Py_DECREF(obj); } LocalFree(text); @@ -1823,7 +1824,7 @@ resize(PyObject *self, PyObject *args) dict = PyObject_stgdict((PyObject *)obj); if (dict == NULL) { PyErr_SetString(PyExc_TypeError, - "excepted ctypes instance"); + "expected ctypes instance"); return NULL; } if (size < dict->size) { diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index 252d9da7dbb56d..8891a0a741de7b 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -37,6 +37,9 @@ typedef struct { PyTypeObject *PyCArg_Type; PyTypeObject *PyCField_Type; PyTypeObject *PyCThunk_Type; +#ifdef MS_WIN32 + PyTypeObject *PyComError_Type; +#endif PyTypeObject 
*StructParam_Type; } ctypes_state; @@ -392,10 +395,6 @@ extern int _ctypes_simple_instance(PyObject *obj); extern PyObject *_ctypes_ptrtype_cache; PyObject *_ctypes_get_errobj(int **pspace); -#ifdef MS_WIN32 -extern PyObject *ComError; -#endif - #ifdef USING_MALLOC_CLOSURE_DOT_C void Py_ffi_closure_free(void *p); void *Py_ffi_closure_alloc(size_t size, void** codeloc); diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c index 2144345de01ba3..a3124ff80551e0 100644 --- a/Modules/_curses_panel.c +++ b/Modules/_curses_panel.c @@ -690,6 +690,9 @@ _curses_panel_exec(PyObject *mod) static PyModuleDef_Slot _curses_slots[] = { {Py_mod_exec, _curses_panel_exec}, + // XXX gh-103092: fix isolation. + {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, + //{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_dbmmodule.c b/Modules/_dbmmodule.c index 54376022dcb182..9908174c94c450 100644 --- a/Modules/_dbmmodule.c +++ b/Modules/_dbmmodule.c @@ -583,6 +583,7 @@ _dbm_module_free(void *module) static PyModuleDef_Slot _dbmmodule_slots[] = { {Py_mod_exec, _dbm_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index 97be89a167104f..42de3c675c2e5a 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -4419,6 +4419,9 @@ module_exec(PyObject *m) static struct PyModuleDef_Slot elementtree_slots[] = { {Py_mod_exec, module_exec}, + // XXX gh-103092: fix isolation. + {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, + //{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index 4032ba79374fa4..a8001d71223fdc 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -1520,6 +1520,7 @@ _functools_free(void *module) static struct PyModuleDef_Slot _functools_slots[] = { {Py_mod_exec, _functools_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c index 4e8acdefc722b2..4dbb5741b2ede8 100644 --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -793,6 +793,7 @@ _gdbm_module_free(void *module) static PyModuleDef_Slot _gdbm_module_slots[] = { {Py_mod_exec, _gdbm_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_hacl/Hacl_Hash_SHA3.c b/Modules/_hacl/Hacl_Hash_SHA3.c new file mode 100644 index 00000000000000..100afe7c2c6d1f --- /dev/null +++ b/Modules/_hacl/Hacl_Hash_SHA3.c @@ -0,0 +1,826 @@ +/* MIT License + * + * Copyright (c) 2016-2022 INRIA, CMU and Microsoft Corporation + * Copyright (c) 2022-2023 HACL* Contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + + +#include "internal/Hacl_Hash_SHA3.h" + +static uint32_t block_len(Spec_Hash_Definitions_hash_alg a) +{ + switch (a) + { + case Spec_Hash_Definitions_SHA3_224: + { + return (uint32_t)144U; + } + case Spec_Hash_Definitions_SHA3_256: + { + return (uint32_t)136U; + } + case Spec_Hash_Definitions_SHA3_384: + { + return (uint32_t)104U; + } + case Spec_Hash_Definitions_SHA3_512: + { + return (uint32_t)72U; + } + case Spec_Hash_Definitions_Shake128: + { + return (uint32_t)168U; + } + case Spec_Hash_Definitions_Shake256: + { + return (uint32_t)136U; + } + default: + { + KRML_HOST_EPRINTF("KaRaMeL incomplete match at %s:%d\n", __FILE__, __LINE__); + KRML_HOST_EXIT(253U); + } + } +} + +static uint32_t hash_len(Spec_Hash_Definitions_hash_alg a) +{ + switch (a) + { + case Spec_Hash_Definitions_SHA3_224: + { + return (uint32_t)28U; + } + case Spec_Hash_Definitions_SHA3_256: + { + return (uint32_t)32U; + } + case Spec_Hash_Definitions_SHA3_384: + { + return (uint32_t)48U; + } + case Spec_Hash_Definitions_SHA3_512: + { + return (uint32_t)64U; + } + default: + { + KRML_HOST_EPRINTF("KaRaMeL incomplete match at %s:%d\n", __FILE__, __LINE__); + KRML_HOST_EXIT(253U); + } + } +} + +void +Hacl_Hash_SHA3_update_multi_sha3( + Spec_Hash_Definitions_hash_alg a, + uint64_t *s, + uint8_t *blocks, + uint32_t n_blocks +) +{ + for (uint32_t i = (uint32_t)0U; i < n_blocks; i++) + { + uint8_t *block = blocks + i * block_len(a); + Hacl_Impl_SHA3_absorb_inner(block_len(a), block, s); + } +} + +void +Hacl_Hash_SHA3_update_last_sha3( + Spec_Hash_Definitions_hash_alg a, + uint64_t *s, + uint8_t *input, + uint32_t input_len +) +{ + uint8_t suffix; + if (a == Spec_Hash_Definitions_Shake128 || a == Spec_Hash_Definitions_Shake256) + { + suffix = (uint8_t)0x1fU; + } + else + { + suffix = (uint8_t)0x06U; + } + uint32_t len = block_len(a); + if (input_len == len) + { + Hacl_Impl_SHA3_absorb_inner(len, input, s); + uint8_t *uu____0 = input + input_len; + uint8_t lastBlock_[200U] = { 0U }; + uint8_t *lastBlock = lastBlock_; + memcpy(lastBlock, uu____0, (uint32_t)0U * sizeof (uint8_t)); + lastBlock[0U] = suffix; + Hacl_Impl_SHA3_loadState(len, lastBlock, s); + if (!((suffix & (uint8_t)0x80U) == (uint8_t)0U) && (uint32_t)0U == len - (uint32_t)1U) + { + Hacl_Impl_SHA3_state_permute(s); + } + uint8_t nextBlock_[200U] = { 0U }; + uint8_t *nextBlock = nextBlock_; + nextBlock[len - (uint32_t)1U] = (uint8_t)0x80U; + Hacl_Impl_SHA3_loadState(len, nextBlock, s); + Hacl_Impl_SHA3_state_permute(s); + return; + } + uint8_t lastBlock_[200U] = { 0U }; + uint8_t *lastBlock = lastBlock_; + memcpy(lastBlock, input, input_len * sizeof (uint8_t)); + lastBlock[input_len] = suffix; + Hacl_Impl_SHA3_loadState(len, lastBlock, s); + if (!((suffix & (uint8_t)0x80U) == (uint8_t)0U) && input_len == len - (uint32_t)1U) + { + Hacl_Impl_SHA3_state_permute(s); + } + uint8_t nextBlock_[200U] = { 0U }; + uint8_t *nextBlock = nextBlock_; + nextBlock[len - (uint32_t)1U] = (uint8_t)0x80U; + Hacl_Impl_SHA3_loadState(len, nextBlock, s); + Hacl_Impl_SHA3_state_permute(s); +} + 
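As a quick sanity check on block_len above (this arithmetic is an aside, not part of the generated code): the value is the Keccak rate in bytes, i.e. (1600 - capacity_bits) / 8. SHA3-256 keeps a 512-bit capacity, giving (1600 - 512) / 8 = 136; SHA3-512 keeps 1024 bits, giving 72; SHAKE128 keeps 256 bits, giving 168. These match both the constants returned by the switch and the rate/capacity pairs passed to Hacl_Impl_SHA3_keccak further down (1344/256, 1088/512, 832/768, 576/1024).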
+typedef struct hash_buf2_s +{ + Hacl_Streaming_Keccak_hash_buf fst; + Hacl_Streaming_Keccak_hash_buf snd; +} +hash_buf2; + +Spec_Hash_Definitions_hash_alg Hacl_Streaming_Keccak_get_alg(Hacl_Streaming_Keccak_state *s) +{ + Hacl_Streaming_Keccak_state scrut = *s; + Hacl_Streaming_Keccak_hash_buf block_state = scrut.block_state; + return block_state.fst; +} + +Hacl_Streaming_Keccak_state *Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_hash_alg a) +{ + KRML_CHECK_SIZE(sizeof (uint8_t), block_len(a)); + uint8_t *buf0 = (uint8_t *)KRML_HOST_CALLOC(block_len(a), sizeof (uint8_t)); + uint64_t *buf = (uint64_t *)KRML_HOST_CALLOC((uint32_t)25U, sizeof (uint64_t)); + Hacl_Streaming_Keccak_hash_buf block_state = { .fst = a, .snd = buf }; + Hacl_Streaming_Keccak_state + s = { .block_state = block_state, .buf = buf0, .total_len = (uint64_t)(uint32_t)0U }; + Hacl_Streaming_Keccak_state + *p = (Hacl_Streaming_Keccak_state *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_Keccak_state)); + p[0U] = s; + uint64_t *s1 = block_state.snd; + for (uint32_t _i = 0U; _i < (uint32_t)25U; ++_i) + ((void **)s1)[_i] = (void *)(uint64_t)0U; + return p; +} + +void Hacl_Streaming_Keccak_free(Hacl_Streaming_Keccak_state *s) +{ + Hacl_Streaming_Keccak_state scrut = *s; + uint8_t *buf = scrut.buf; + Hacl_Streaming_Keccak_hash_buf block_state = scrut.block_state; + uint64_t *s1 = block_state.snd; + KRML_HOST_FREE(s1); + KRML_HOST_FREE(buf); + KRML_HOST_FREE(s); +} + +Hacl_Streaming_Keccak_state *Hacl_Streaming_Keccak_copy(Hacl_Streaming_Keccak_state *s0) +{ + Hacl_Streaming_Keccak_state scrut0 = *s0; + Hacl_Streaming_Keccak_hash_buf block_state0 = scrut0.block_state; + uint8_t *buf0 = scrut0.buf; + uint64_t total_len0 = scrut0.total_len; + Spec_Hash_Definitions_hash_alg i = block_state0.fst; + KRML_CHECK_SIZE(sizeof (uint8_t), block_len(i)); + uint8_t *buf1 = (uint8_t *)KRML_HOST_CALLOC(block_len(i), sizeof (uint8_t)); + memcpy(buf1, buf0, block_len(i) * sizeof (uint8_t)); + uint64_t *buf = (uint64_t *)KRML_HOST_CALLOC((uint32_t)25U, sizeof (uint64_t)); + Hacl_Streaming_Keccak_hash_buf block_state = { .fst = i, .snd = buf }; + hash_buf2 scrut = { .fst = block_state0, .snd = block_state }; + uint64_t *s_dst = scrut.snd.snd; + uint64_t *s_src = scrut.fst.snd; + memcpy(s_dst, s_src, (uint32_t)25U * sizeof (uint64_t)); + Hacl_Streaming_Keccak_state + s = { .block_state = block_state, .buf = buf1, .total_len = total_len0 }; + Hacl_Streaming_Keccak_state + *p = (Hacl_Streaming_Keccak_state *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_Keccak_state)); + p[0U] = s; + return p; +} + +void Hacl_Streaming_Keccak_reset(Hacl_Streaming_Keccak_state *s) +{ + Hacl_Streaming_Keccak_state scrut = *s; + uint8_t *buf = scrut.buf; + Hacl_Streaming_Keccak_hash_buf block_state = scrut.block_state; + uint64_t *s1 = block_state.snd; + for (uint32_t _i = 0U; _i < (uint32_t)25U; ++_i) + ((void **)s1)[_i] = (void *)(uint64_t)0U; + Hacl_Streaming_Keccak_state + tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; + s[0U] = tmp; +} + +uint32_t +Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint32_t len) +{ + Hacl_Streaming_Keccak_state s = *p; + Hacl_Streaming_Keccak_hash_buf block_state = s.block_state; + uint64_t total_len = s.total_len; + Spec_Hash_Definitions_hash_alg i = block_state.fst; + if ((uint64_t)len > (uint64_t)0xffffffffU - total_len) + { + return (uint32_t)1U; + } + uint32_t sz; + if (total_len % (uint64_t)block_len(i) == (uint64_t)0U && total_len > (uint64_t)0U) + { + sz = 
block_len(i); + } + else + { + sz = (uint32_t)(total_len % (uint64_t)block_len(i)); + } + if (len <= block_len(i) - sz) + { + Hacl_Streaming_Keccak_state s1 = *p; + Hacl_Streaming_Keccak_hash_buf block_state1 = s1.block_state; + uint8_t *buf = s1.buf; + uint64_t total_len1 = s1.total_len; + uint32_t sz1; + if (total_len1 % (uint64_t)block_len(i) == (uint64_t)0U && total_len1 > (uint64_t)0U) + { + sz1 = block_len(i); + } + else + { + sz1 = (uint32_t)(total_len1 % (uint64_t)block_len(i)); + } + uint8_t *buf2 = buf + sz1; + memcpy(buf2, data, len * sizeof (uint8_t)); + uint64_t total_len2 = total_len1 + (uint64_t)len; + *p + = + ( + (Hacl_Streaming_Keccak_state){ + .block_state = block_state1, + .buf = buf, + .total_len = total_len2 + } + ); + } + else if (sz == (uint32_t)0U) + { + Hacl_Streaming_Keccak_state s1 = *p; + Hacl_Streaming_Keccak_hash_buf block_state1 = s1.block_state; + uint8_t *buf = s1.buf; + uint64_t total_len1 = s1.total_len; + uint32_t sz1; + if (total_len1 % (uint64_t)block_len(i) == (uint64_t)0U && total_len1 > (uint64_t)0U) + { + sz1 = block_len(i); + } + else + { + sz1 = (uint32_t)(total_len1 % (uint64_t)block_len(i)); + } + if (!(sz1 == (uint32_t)0U)) + { + Spec_Hash_Definitions_hash_alg a1 = block_state1.fst; + uint64_t *s2 = block_state1.snd; + Hacl_Hash_SHA3_update_multi_sha3(a1, s2, buf, block_len(i) / block_len(a1)); + } + uint32_t ite; + if ((uint64_t)len % (uint64_t)block_len(i) == (uint64_t)0U && (uint64_t)len > (uint64_t)0U) + { + ite = block_len(i); + } + else + { + ite = (uint32_t)((uint64_t)len % (uint64_t)block_len(i)); + } + uint32_t n_blocks = (len - ite) / block_len(i); + uint32_t data1_len = n_blocks * block_len(i); + uint32_t data2_len = len - data1_len; + uint8_t *data1 = data; + uint8_t *data2 = data + data1_len; + Spec_Hash_Definitions_hash_alg a1 = block_state1.fst; + uint64_t *s2 = block_state1.snd; + Hacl_Hash_SHA3_update_multi_sha3(a1, s2, data1, data1_len / block_len(a1)); + uint8_t *dst = buf; + memcpy(dst, data2, data2_len * sizeof (uint8_t)); + *p + = + ( + (Hacl_Streaming_Keccak_state){ + .block_state = block_state1, + .buf = buf, + .total_len = total_len1 + (uint64_t)len + } + ); + } + else + { + uint32_t diff = block_len(i) - sz; + uint8_t *data1 = data; + uint8_t *data2 = data + diff; + Hacl_Streaming_Keccak_state s1 = *p; + Hacl_Streaming_Keccak_hash_buf block_state10 = s1.block_state; + uint8_t *buf0 = s1.buf; + uint64_t total_len10 = s1.total_len; + uint32_t sz10; + if (total_len10 % (uint64_t)block_len(i) == (uint64_t)0U && total_len10 > (uint64_t)0U) + { + sz10 = block_len(i); + } + else + { + sz10 = (uint32_t)(total_len10 % (uint64_t)block_len(i)); + } + uint8_t *buf2 = buf0 + sz10; + memcpy(buf2, data1, diff * sizeof (uint8_t)); + uint64_t total_len2 = total_len10 + (uint64_t)diff; + *p + = + ( + (Hacl_Streaming_Keccak_state){ + .block_state = block_state10, + .buf = buf0, + .total_len = total_len2 + } + ); + Hacl_Streaming_Keccak_state s10 = *p; + Hacl_Streaming_Keccak_hash_buf block_state1 = s10.block_state; + uint8_t *buf = s10.buf; + uint64_t total_len1 = s10.total_len; + uint32_t sz1; + if (total_len1 % (uint64_t)block_len(i) == (uint64_t)0U && total_len1 > (uint64_t)0U) + { + sz1 = block_len(i); + } + else + { + sz1 = (uint32_t)(total_len1 % (uint64_t)block_len(i)); + } + if (!(sz1 == (uint32_t)0U)) + { + Spec_Hash_Definitions_hash_alg a1 = block_state1.fst; + uint64_t *s2 = block_state1.snd; + Hacl_Hash_SHA3_update_multi_sha3(a1, s2, buf, block_len(i) / block_len(a1)); + } + uint32_t ite; + if + ( + (uint64_t)(len - diff) + 
% (uint64_t)block_len(i) + == (uint64_t)0U + && (uint64_t)(len - diff) > (uint64_t)0U + ) + { + ite = block_len(i); + } + else + { + ite = (uint32_t)((uint64_t)(len - diff) % (uint64_t)block_len(i)); + } + uint32_t n_blocks = (len - diff - ite) / block_len(i); + uint32_t data1_len = n_blocks * block_len(i); + uint32_t data2_len = len - diff - data1_len; + uint8_t *data11 = data2; + uint8_t *data21 = data2 + data1_len; + Spec_Hash_Definitions_hash_alg a1 = block_state1.fst; + uint64_t *s2 = block_state1.snd; + Hacl_Hash_SHA3_update_multi_sha3(a1, s2, data11, data1_len / block_len(a1)); + uint8_t *dst = buf; + memcpy(dst, data21, data2_len * sizeof (uint8_t)); + *p + = + ( + (Hacl_Streaming_Keccak_state){ + .block_state = block_state1, + .buf = buf, + .total_len = total_len1 + (uint64_t)(len - diff) + } + ); + } + return (uint32_t)0U; +} + +static void +finish_( + Spec_Hash_Definitions_hash_alg a, + Hacl_Streaming_Keccak_state *p, + uint8_t *dst, + uint32_t l +) +{ + Hacl_Streaming_Keccak_state scrut0 = *p; + Hacl_Streaming_Keccak_hash_buf block_state = scrut0.block_state; + uint8_t *buf_ = scrut0.buf; + uint64_t total_len = scrut0.total_len; + uint32_t r; + if (total_len % (uint64_t)block_len(a) == (uint64_t)0U && total_len > (uint64_t)0U) + { + r = block_len(a); + } + else + { + r = (uint32_t)(total_len % (uint64_t)block_len(a)); + } + uint8_t *buf_1 = buf_; + uint64_t buf[25U] = { 0U }; + Hacl_Streaming_Keccak_hash_buf tmp_block_state = { .fst = a, .snd = buf }; + hash_buf2 scrut = { .fst = block_state, .snd = tmp_block_state }; + uint64_t *s_dst = scrut.snd.snd; + uint64_t *s_src = scrut.fst.snd; + memcpy(s_dst, s_src, (uint32_t)25U * sizeof (uint64_t)); + uint32_t ite0; + if (r % block_len(a) == (uint32_t)0U && r > (uint32_t)0U) + { + ite0 = block_len(a); + } + else + { + ite0 = r % block_len(a); + } + uint8_t *buf_last = buf_1 + r - ite0; + uint8_t *buf_multi = buf_1; + Spec_Hash_Definitions_hash_alg a1 = tmp_block_state.fst; + uint64_t *s0 = tmp_block_state.snd; + Hacl_Hash_SHA3_update_multi_sha3(a1, s0, buf_multi, (uint32_t)0U / block_len(a1)); + Spec_Hash_Definitions_hash_alg a10 = tmp_block_state.fst; + uint64_t *s1 = tmp_block_state.snd; + Hacl_Hash_SHA3_update_last_sha3(a10, s1, buf_last, r); + Spec_Hash_Definitions_hash_alg a11 = tmp_block_state.fst; + uint64_t *s = tmp_block_state.snd; + if (a11 == Spec_Hash_Definitions_Shake128 || a11 == Spec_Hash_Definitions_Shake256) + { + uint32_t ite; + if (a11 == Spec_Hash_Definitions_Shake128 || a11 == Spec_Hash_Definitions_Shake256) + { + ite = l; + } + else + { + ite = hash_len(a11); + } + Hacl_Impl_SHA3_squeeze(s, block_len(a11), ite, dst); + return; + } + Hacl_Impl_SHA3_squeeze(s, block_len(a11), hash_len(a11), dst); +} + +Hacl_Streaming_Keccak_error_code +Hacl_Streaming_Keccak_finish(Hacl_Streaming_Keccak_state *s, uint8_t *dst) +{ + Spec_Hash_Definitions_hash_alg a1 = Hacl_Streaming_Keccak_get_alg(s); + if (a1 == Spec_Hash_Definitions_Shake128 || a1 == Spec_Hash_Definitions_Shake256) + { + return Hacl_Streaming_Keccak_InvalidAlgorithm; + } + finish_(a1, s, dst, hash_len(a1)); + return Hacl_Streaming_Keccak_Success; +} + +Hacl_Streaming_Keccak_error_code +Hacl_Streaming_Keccak_squeeze(Hacl_Streaming_Keccak_state *s, uint8_t *dst, uint32_t l) +{ + Spec_Hash_Definitions_hash_alg a1 = Hacl_Streaming_Keccak_get_alg(s); + if (!(a1 == Spec_Hash_Definitions_Shake128 || a1 == Spec_Hash_Definitions_Shake256)) + { + return Hacl_Streaming_Keccak_InvalidAlgorithm; + } + if (l == (uint32_t)0U) + { + return Hacl_Streaming_Keccak_InvalidLength; + 
} + finish_(a1, s, dst, l); + return Hacl_Streaming_Keccak_Success; +} + +uint32_t Hacl_Streaming_Keccak_block_len(Hacl_Streaming_Keccak_state *s) +{ + Spec_Hash_Definitions_hash_alg a1 = Hacl_Streaming_Keccak_get_alg(s); + return block_len(a1); +} + +uint32_t Hacl_Streaming_Keccak_hash_len(Hacl_Streaming_Keccak_state *s) +{ + Spec_Hash_Definitions_hash_alg a1 = Hacl_Streaming_Keccak_get_alg(s); + return hash_len(a1); +} + +bool Hacl_Streaming_Keccak_is_shake(Hacl_Streaming_Keccak_state *s) +{ + Spec_Hash_Definitions_hash_alg uu____0 = Hacl_Streaming_Keccak_get_alg(s); + return uu____0 == Spec_Hash_Definitions_Shake128 || uu____0 == Spec_Hash_Definitions_Shake256; +} + +void +Hacl_SHA3_shake128_hacl( + uint32_t inputByteLen, + uint8_t *input, + uint32_t outputByteLen, + uint8_t *output +) +{ + Hacl_Impl_SHA3_keccak((uint32_t)1344U, + (uint32_t)256U, + inputByteLen, + input, + (uint8_t)0x1FU, + outputByteLen, + output); +} + +void +Hacl_SHA3_shake256_hacl( + uint32_t inputByteLen, + uint8_t *input, + uint32_t outputByteLen, + uint8_t *output +) +{ + Hacl_Impl_SHA3_keccak((uint32_t)1088U, + (uint32_t)512U, + inputByteLen, + input, + (uint8_t)0x1FU, + outputByteLen, + output); +} + +void Hacl_SHA3_sha3_224(uint32_t inputByteLen, uint8_t *input, uint8_t *output) +{ + Hacl_Impl_SHA3_keccak((uint32_t)1152U, + (uint32_t)448U, + inputByteLen, + input, + (uint8_t)0x06U, + (uint32_t)28U, + output); +} + +void Hacl_SHA3_sha3_256(uint32_t inputByteLen, uint8_t *input, uint8_t *output) +{ + Hacl_Impl_SHA3_keccak((uint32_t)1088U, + (uint32_t)512U, + inputByteLen, + input, + (uint8_t)0x06U, + (uint32_t)32U, + output); +} + +void Hacl_SHA3_sha3_384(uint32_t inputByteLen, uint8_t *input, uint8_t *output) +{ + Hacl_Impl_SHA3_keccak((uint32_t)832U, + (uint32_t)768U, + inputByteLen, + input, + (uint8_t)0x06U, + (uint32_t)48U, + output); +} + +void Hacl_SHA3_sha3_512(uint32_t inputByteLen, uint8_t *input, uint8_t *output) +{ + Hacl_Impl_SHA3_keccak((uint32_t)576U, + (uint32_t)1024U, + inputByteLen, + input, + (uint8_t)0x06U, + (uint32_t)64U, + output); +} + +static const +uint32_t +keccak_rotc[24U] = + { + (uint32_t)1U, (uint32_t)3U, (uint32_t)6U, (uint32_t)10U, (uint32_t)15U, (uint32_t)21U, + (uint32_t)28U, (uint32_t)36U, (uint32_t)45U, (uint32_t)55U, (uint32_t)2U, (uint32_t)14U, + (uint32_t)27U, (uint32_t)41U, (uint32_t)56U, (uint32_t)8U, (uint32_t)25U, (uint32_t)43U, + (uint32_t)62U, (uint32_t)18U, (uint32_t)39U, (uint32_t)61U, (uint32_t)20U, (uint32_t)44U + }; + +static const +uint32_t +keccak_piln[24U] = + { + (uint32_t)10U, (uint32_t)7U, (uint32_t)11U, (uint32_t)17U, (uint32_t)18U, (uint32_t)3U, + (uint32_t)5U, (uint32_t)16U, (uint32_t)8U, (uint32_t)21U, (uint32_t)24U, (uint32_t)4U, + (uint32_t)15U, (uint32_t)23U, (uint32_t)19U, (uint32_t)13U, (uint32_t)12U, (uint32_t)2U, + (uint32_t)20U, (uint32_t)14U, (uint32_t)22U, (uint32_t)9U, (uint32_t)6U, (uint32_t)1U + }; + +static const +uint64_t +keccak_rndc[24U] = + { + (uint64_t)0x0000000000000001U, (uint64_t)0x0000000000008082U, (uint64_t)0x800000000000808aU, + (uint64_t)0x8000000080008000U, (uint64_t)0x000000000000808bU, (uint64_t)0x0000000080000001U, + (uint64_t)0x8000000080008081U, (uint64_t)0x8000000000008009U, (uint64_t)0x000000000000008aU, + (uint64_t)0x0000000000000088U, (uint64_t)0x0000000080008009U, (uint64_t)0x000000008000000aU, + (uint64_t)0x000000008000808bU, (uint64_t)0x800000000000008bU, (uint64_t)0x8000000000008089U, + (uint64_t)0x8000000000008003U, (uint64_t)0x8000000000008002U, (uint64_t)0x8000000000000080U, + (uint64_t)0x000000000000800aU, 
(uint64_t)0x800000008000000aU, (uint64_t)0x8000000080008081U, + (uint64_t)0x8000000000008080U, (uint64_t)0x0000000080000001U, (uint64_t)0x8000000080008008U + }; + +void Hacl_Impl_SHA3_state_permute(uint64_t *s) +{ + for (uint32_t i0 = (uint32_t)0U; i0 < (uint32_t)24U; i0++) + { + uint64_t _C[5U] = { 0U }; + KRML_MAYBE_FOR5(i, + (uint32_t)0U, + (uint32_t)5U, + (uint32_t)1U, + _C[i] = + s[i + + (uint32_t)0U] + ^ + (s[i + + (uint32_t)5U] + ^ (s[i + (uint32_t)10U] ^ (s[i + (uint32_t)15U] ^ s[i + (uint32_t)20U])));); + KRML_MAYBE_FOR5(i1, + (uint32_t)0U, + (uint32_t)5U, + (uint32_t)1U, + uint64_t uu____0 = _C[(i1 + (uint32_t)1U) % (uint32_t)5U]; + uint64_t + _D = + _C[(i1 + (uint32_t)4U) + % (uint32_t)5U] + ^ (uu____0 << (uint32_t)1U | uu____0 >> (uint32_t)63U); + KRML_MAYBE_FOR5(i, + (uint32_t)0U, + (uint32_t)5U, + (uint32_t)1U, + s[i1 + (uint32_t)5U * i] = s[i1 + (uint32_t)5U * i] ^ _D;);); + uint64_t x = s[1U]; + uint64_t current = x; + for (uint32_t i = (uint32_t)0U; i < (uint32_t)24U; i++) + { + uint32_t _Y = keccak_piln[i]; + uint32_t r = keccak_rotc[i]; + uint64_t temp = s[_Y]; + uint64_t uu____1 = current; + s[_Y] = uu____1 << r | uu____1 >> ((uint32_t)64U - r); + current = temp; + } + KRML_MAYBE_FOR5(i, + (uint32_t)0U, + (uint32_t)5U, + (uint32_t)1U, + uint64_t + v0 = + s[(uint32_t)0U + + (uint32_t)5U * i] + ^ (~s[(uint32_t)1U + (uint32_t)5U * i] & s[(uint32_t)2U + (uint32_t)5U * i]); + uint64_t + v1 = + s[(uint32_t)1U + + (uint32_t)5U * i] + ^ (~s[(uint32_t)2U + (uint32_t)5U * i] & s[(uint32_t)3U + (uint32_t)5U * i]); + uint64_t + v2 = + s[(uint32_t)2U + + (uint32_t)5U * i] + ^ (~s[(uint32_t)3U + (uint32_t)5U * i] & s[(uint32_t)4U + (uint32_t)5U * i]); + uint64_t + v3 = + s[(uint32_t)3U + + (uint32_t)5U * i] + ^ (~s[(uint32_t)4U + (uint32_t)5U * i] & s[(uint32_t)0U + (uint32_t)5U * i]); + uint64_t + v4 = + s[(uint32_t)4U + + (uint32_t)5U * i] + ^ (~s[(uint32_t)0U + (uint32_t)5U * i] & s[(uint32_t)1U + (uint32_t)5U * i]); + s[(uint32_t)0U + (uint32_t)5U * i] = v0; + s[(uint32_t)1U + (uint32_t)5U * i] = v1; + s[(uint32_t)2U + (uint32_t)5U * i] = v2; + s[(uint32_t)3U + (uint32_t)5U * i] = v3; + s[(uint32_t)4U + (uint32_t)5U * i] = v4;); + uint64_t c = keccak_rndc[i0]; + s[0U] = s[0U] ^ c; + } +} + +void Hacl_Impl_SHA3_loadState(uint32_t rateInBytes, uint8_t *input, uint64_t *s) +{ + uint8_t block[200U] = { 0U }; + memcpy(block, input, rateInBytes * sizeof (uint8_t)); + for (uint32_t i = (uint32_t)0U; i < (uint32_t)25U; i++) + { + uint64_t u = load64_le(block + i * (uint32_t)8U); + uint64_t x = u; + s[i] = s[i] ^ x; + } +} + +static void storeState(uint32_t rateInBytes, uint64_t *s, uint8_t *res) +{ + uint8_t block[200U] = { 0U }; + for (uint32_t i = (uint32_t)0U; i < (uint32_t)25U; i++) + { + uint64_t sj = s[i]; + store64_le(block + i * (uint32_t)8U, sj); + } + memcpy(res, block, rateInBytes * sizeof (uint8_t)); +} + +void Hacl_Impl_SHA3_absorb_inner(uint32_t rateInBytes, uint8_t *block, uint64_t *s) +{ + Hacl_Impl_SHA3_loadState(rateInBytes, block, s); + Hacl_Impl_SHA3_state_permute(s); +} + +static void +absorb( + uint64_t *s, + uint32_t rateInBytes, + uint32_t inputByteLen, + uint8_t *input, + uint8_t delimitedSuffix +) +{ + uint32_t n_blocks = inputByteLen / rateInBytes; + uint32_t rem = inputByteLen % rateInBytes; + for (uint32_t i = (uint32_t)0U; i < n_blocks; i++) + { + uint8_t *block = input + i * rateInBytes; + Hacl_Impl_SHA3_absorb_inner(rateInBytes, block, s); + } + uint8_t *last = input + n_blocks * rateInBytes; + uint8_t lastBlock_[200U] = { 0U }; + uint8_t *lastBlock = 
lastBlock_; + memcpy(lastBlock, last, rem * sizeof (uint8_t)); + lastBlock[rem] = delimitedSuffix; + Hacl_Impl_SHA3_loadState(rateInBytes, lastBlock, s); + if (!((delimitedSuffix & (uint8_t)0x80U) == (uint8_t)0U) && rem == rateInBytes - (uint32_t)1U) + { + Hacl_Impl_SHA3_state_permute(s); + } + uint8_t nextBlock_[200U] = { 0U }; + uint8_t *nextBlock = nextBlock_; + nextBlock[rateInBytes - (uint32_t)1U] = (uint8_t)0x80U; + Hacl_Impl_SHA3_loadState(rateInBytes, nextBlock, s); + Hacl_Impl_SHA3_state_permute(s); +} + +void +Hacl_Impl_SHA3_squeeze( + uint64_t *s, + uint32_t rateInBytes, + uint32_t outputByteLen, + uint8_t *output +) +{ + uint32_t outBlocks = outputByteLen / rateInBytes; + uint32_t remOut = outputByteLen % rateInBytes; + uint8_t *last = output + outputByteLen - remOut; + uint8_t *blocks = output; + for (uint32_t i = (uint32_t)0U; i < outBlocks; i++) + { + storeState(rateInBytes, s, blocks + i * rateInBytes); + Hacl_Impl_SHA3_state_permute(s); + } + storeState(remOut, s, last); +} + +void +Hacl_Impl_SHA3_keccak( + uint32_t rate, + uint32_t capacity, + uint32_t inputByteLen, + uint8_t *input, + uint8_t delimitedSuffix, + uint32_t outputByteLen, + uint8_t *output +) +{ + uint32_t rateInBytes = rate / (uint32_t)8U; + uint64_t s[25U] = { 0U }; + absorb(s, rateInBytes, inputByteLen, input, delimitedSuffix); + Hacl_Impl_SHA3_squeeze(s, rateInBytes, outputByteLen, output); +} + diff --git a/Modules/_hacl/Hacl_Hash_SHA3.h b/Modules/_hacl/Hacl_Hash_SHA3.h new file mode 100644 index 00000000000000..2a5cf4b1844b9d --- /dev/null +++ b/Modules/_hacl/Hacl_Hash_SHA3.h @@ -0,0 +1,136 @@ +/* MIT License + * + * Copyright (c) 2016-2022 INRIA, CMU and Microsoft Corporation + * Copyright (c) 2022-2023 HACL* Contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + + +#ifndef __Hacl_Hash_SHA3_H +#define __Hacl_Hash_SHA3_H + +#if defined(__cplusplus) +extern "C" { +#endif + +#include +#include "krml/types.h" +#include "krml/lowstar_endianness.h" +#include "krml/internal/target.h" + +#include "Hacl_Streaming_Types.h" + +typedef struct Hacl_Streaming_Keccak_hash_buf_s +{ + Spec_Hash_Definitions_hash_alg fst; + uint64_t *snd; +} +Hacl_Streaming_Keccak_hash_buf; + +typedef struct Hacl_Streaming_Keccak_state_s +{ + Hacl_Streaming_Keccak_hash_buf block_state; + uint8_t *buf; + uint64_t total_len; +} +Hacl_Streaming_Keccak_state; + +Spec_Hash_Definitions_hash_alg Hacl_Streaming_Keccak_get_alg(Hacl_Streaming_Keccak_state *s); + +Hacl_Streaming_Keccak_state *Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_hash_alg a); + +void Hacl_Streaming_Keccak_free(Hacl_Streaming_Keccak_state *s); + +Hacl_Streaming_Keccak_state *Hacl_Streaming_Keccak_copy(Hacl_Streaming_Keccak_state *s0); + +void Hacl_Streaming_Keccak_reset(Hacl_Streaming_Keccak_state *s); + +uint32_t +Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint32_t len); + +#define Hacl_Streaming_Keccak_Success 0 +#define Hacl_Streaming_Keccak_InvalidAlgorithm 1 +#define Hacl_Streaming_Keccak_InvalidLength 2 + +typedef uint8_t Hacl_Streaming_Keccak_error_code; + +Hacl_Streaming_Keccak_error_code +Hacl_Streaming_Keccak_finish(Hacl_Streaming_Keccak_state *s, uint8_t *dst); + +Hacl_Streaming_Keccak_error_code +Hacl_Streaming_Keccak_squeeze(Hacl_Streaming_Keccak_state *s, uint8_t *dst, uint32_t l); + +uint32_t Hacl_Streaming_Keccak_block_len(Hacl_Streaming_Keccak_state *s); + +uint32_t Hacl_Streaming_Keccak_hash_len(Hacl_Streaming_Keccak_state *s); + +bool Hacl_Streaming_Keccak_is_shake(Hacl_Streaming_Keccak_state *s); + +void +Hacl_SHA3_shake128_hacl( + uint32_t inputByteLen, + uint8_t *input, + uint32_t outputByteLen, + uint8_t *output +); + +void +Hacl_SHA3_shake256_hacl( + uint32_t inputByteLen, + uint8_t *input, + uint32_t outputByteLen, + uint8_t *output +); + +void Hacl_SHA3_sha3_224(uint32_t inputByteLen, uint8_t *input, uint8_t *output); + +void Hacl_SHA3_sha3_256(uint32_t inputByteLen, uint8_t *input, uint8_t *output); + +void Hacl_SHA3_sha3_384(uint32_t inputByteLen, uint8_t *input, uint8_t *output); + +void Hacl_SHA3_sha3_512(uint32_t inputByteLen, uint8_t *input, uint8_t *output); + +void Hacl_Impl_SHA3_absorb_inner(uint32_t rateInBytes, uint8_t *block, uint64_t *s); + +void +Hacl_Impl_SHA3_squeeze( + uint64_t *s, + uint32_t rateInBytes, + uint32_t outputByteLen, + uint8_t *output +); + +void +Hacl_Impl_SHA3_keccak( + uint32_t rate, + uint32_t capacity, + uint32_t inputByteLen, + uint8_t *input, + uint8_t delimitedSuffix, + uint32_t outputByteLen, + uint8_t *output +); + +#if defined(__cplusplus) +} +#endif + +#define __Hacl_Hash_SHA3_H_DEFINED +#endif diff --git a/Modules/_hacl/Hacl_Streaming_Types.h b/Modules/_hacl/Hacl_Streaming_Types.h index 51057611ca978d..8a60b707bc4958 100644 --- a/Modules/_hacl/Hacl_Streaming_Types.h +++ b/Modules/_hacl/Hacl_Streaming_Types.h @@ -35,6 +35,23 @@ extern "C" { #include "krml/lowstar_endianness.h" #include "krml/internal/target.h" +#define Spec_Hash_Definitions_SHA2_224 0 +#define Spec_Hash_Definitions_SHA2_256 1 +#define Spec_Hash_Definitions_SHA2_384 2 +#define Spec_Hash_Definitions_SHA2_512 3 +#define Spec_Hash_Definitions_SHA1 4 +#define Spec_Hash_Definitions_MD5 5 +#define Spec_Hash_Definitions_Blake2S 6 +#define Spec_Hash_Definitions_Blake2B 7 +#define Spec_Hash_Definitions_SHA3_256 8 +#define Spec_Hash_Definitions_SHA3_224 
9 +#define Spec_Hash_Definitions_SHA3_384 10 +#define Spec_Hash_Definitions_SHA3_512 11 +#define Spec_Hash_Definitions_Shake128 12 +#define Spec_Hash_Definitions_Shake256 13 + +typedef uint8_t Spec_Hash_Definitions_hash_alg; + typedef struct Hacl_Streaming_MD_state_32_s { uint32_t *block_state; diff --git a/Modules/_hacl/include/krml/internal/target.h b/Modules/_hacl/include/krml/internal/target.h index dcbe7007b17be8..5a2f94eb2ec8da 100644 --- a/Modules/_hacl/include/krml/internal/target.h +++ b/Modules/_hacl/include/krml/internal/target.h @@ -19,6 +19,28 @@ # define inline __inline__ #endif +/******************************************************************************/ +/* Macros that KaRaMeL will generate. */ +/******************************************************************************/ + +/* For "bare" targets that do not have a C stdlib, the user might want to use + * [-add-early-include '"mydefinitions.h"'] and override these. */ +#ifndef KRML_HOST_PRINTF +# define KRML_HOST_PRINTF printf +#endif + +#if ( \ + (defined __STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && \ + (!(defined KRML_HOST_EPRINTF))) +# define KRML_HOST_EPRINTF(...) fprintf(stderr, __VA_ARGS__) +#elif !(defined KRML_HOST_EPRINTF) && defined(_MSC_VER) +# define KRML_HOST_EPRINTF(...) fprintf(stderr, __VA_ARGS__) +#endif + +#ifndef KRML_HOST_EXIT +# define KRML_HOST_EXIT exit +#endif + #ifndef KRML_HOST_MALLOC # define KRML_HOST_MALLOC malloc #endif @@ -35,6 +57,28 @@ # define KRML_HOST_IGNORE(x) (void)(x) #endif +/* In FStar.Buffer.fst, the size of arrays is uint32_t, but it's a number of + * *elements*. Do an ugly, run-time check (some of which KaRaMeL can eliminate). + */ +#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4)) +# define _KRML_CHECK_SIZE_PRAGMA \ + _Pragma("GCC diagnostic ignored \"-Wtype-limits\"") +#else +# define _KRML_CHECK_SIZE_PRAGMA +#endif + +#define KRML_CHECK_SIZE(size_elt, sz) \ + do { \ + _KRML_CHECK_SIZE_PRAGMA \ + if (((size_t)(sz)) > ((size_t)(SIZE_MAX / (size_elt)))) { \ + KRML_HOST_PRINTF( \ + "Maximum allocatable size exceeded, aborting before overflow at " \ + "%s:%d\n", \ + __FILE__, __LINE__); \ + KRML_HOST_EXIT(253); \ + } \ + } while (0) + /* Macros for prettier unrolling of loops */ #define KRML_LOOP1(i, n, x) { \ x \ diff --git a/Modules/_hacl/include/krml/lowstar_endianness.h b/Modules/_hacl/include/krml/lowstar_endianness.h index 32a7391e817ebb..1aa2ccd644c06f 100644 --- a/Modules/_hacl/include/krml/lowstar_endianness.h +++ b/Modules/_hacl/include/krml/lowstar_endianness.h @@ -77,7 +77,7 @@ # define le64toh(x) (x) /* ... for Windows (GCC-like, e.g. mingw or clang) */ -#elif (defined(_WIN32) || defined(_WIN64)) && \ +#elif (defined(_WIN32) || defined(_WIN64) || defined(__EMSCRIPTEN__)) && \ (defined(__GNUC__) || defined(__clang__)) # define htobe16(x) __builtin_bswap16(x) @@ -96,7 +96,8 @@ # define le64toh(x) (x) /* ... generic big-endian fallback code */ -#elif defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ +/* ... 
AIX doesn't have __BYTE_ORDER__ (with XLC compiler) & is always big-endian */ +#elif (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__) || defined(_AIX) /* byte swapping code inspired by: * https://github.com/rweather/arduinolibs/blob/master/libraries/Crypto/utility/EndianUtil.h diff --git a/Modules/_hacl/internal/Hacl_Hash_SHA3.h b/Modules/_hacl/internal/Hacl_Hash_SHA3.h new file mode 100644 index 00000000000000..1c9808b8dd497c --- /dev/null +++ b/Modules/_hacl/internal/Hacl_Hash_SHA3.h @@ -0,0 +1,65 @@ +/* MIT License + * + * Copyright (c) 2016-2022 INRIA, CMU and Microsoft Corporation + * Copyright (c) 2022-2023 HACL* Contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + + +#ifndef __internal_Hacl_Hash_SHA3_H +#define __internal_Hacl_Hash_SHA3_H + +#if defined(__cplusplus) +extern "C" { +#endif + +#include +#include "krml/types.h" +#include "krml/lowstar_endianness.h" +#include "krml/internal/target.h" + +#include "../Hacl_Hash_SHA3.h" + +void +Hacl_Hash_SHA3_update_multi_sha3( + Spec_Hash_Definitions_hash_alg a, + uint64_t *s, + uint8_t *blocks, + uint32_t n_blocks +); + +void +Hacl_Hash_SHA3_update_last_sha3( + Spec_Hash_Definitions_hash_alg a, + uint64_t *s, + uint8_t *input, + uint32_t input_len +); + +void Hacl_Impl_SHA3_state_permute(uint64_t *s); + +void Hacl_Impl_SHA3_loadState(uint32_t rateInBytes, uint8_t *input, uint64_t *s); + +#if defined(__cplusplus) +} +#endif + +#define __internal_Hacl_Hash_SHA3_H_DEFINED +#endif diff --git a/Modules/_hacl/python_hacl_namespaces.h b/Modules/_hacl/python_hacl_namespaces.h index ee28f244266b85..0df236282ac509 100644 --- a/Modules/_hacl/python_hacl_namespaces.h +++ b/Modules/_hacl/python_hacl_namespaces.h @@ -59,5 +59,28 @@ #define Hacl_Streaming_SHA1_legacy_copy python_hashlib_Hacl_Streaming_SHA1_legacy_copy #define Hacl_Streaming_SHA1_legacy_hash python_hashlib_Hacl_Streaming_SHA1_legacy_hash +#define Hacl_Hash_SHA3_update_last_sha3 python_hashlib_Hacl_Hash_SHA3_update_last_sha3 +#define Hacl_Hash_SHA3_update_multi_sha3 python_hashlib_Hacl_Hash_SHA3_update_multi_sha3 +#define Hacl_Impl_SHA3_absorb_inner python_hashlib_Hacl_Impl_SHA3_absorb_inner +#define Hacl_Impl_SHA3_keccak python_hashlib_Hacl_Impl_SHA3_keccak +#define Hacl_Impl_SHA3_loadState python_hashlib_Hacl_Impl_SHA3_loadState +#define Hacl_Impl_SHA3_squeeze python_hashlib_Hacl_Impl_SHA3_squeeze +#define Hacl_Impl_SHA3_state_permute python_hashlib_Hacl_Impl_SHA3_state_permute +#define Hacl_SHA3_sha3_224 
python_hashlib_Hacl_SHA3_sha3_224 +#define Hacl_SHA3_sha3_256 python_hashlib_Hacl_SHA3_sha3_256 +#define Hacl_SHA3_sha3_384 python_hashlib_Hacl_SHA3_sha3_384 +#define Hacl_SHA3_sha3_512 python_hashlib_Hacl_SHA3_sha3_512 +#define Hacl_SHA3_shake128_hacl python_hashlib_Hacl_SHA3_shake128_hacl +#define Hacl_SHA3_shake256_hacl python_hashlib_Hacl_SHA3_shake256_hacl +#define Hacl_Streaming_Keccak_block_len python_hashlib_Hacl_Streaming_Keccak_block_len +#define Hacl_Streaming_Keccak_copy python_hashlib_Hacl_Streaming_Keccak_copy +#define Hacl_Streaming_Keccak_finish python_hashlib_Hacl_Streaming_Keccak_finish +#define Hacl_Streaming_Keccak_free python_hashlib_Hacl_Streaming_Keccak_free +#define Hacl_Streaming_Keccak_get_alg python_hashlib_Hacl_Streaming_Keccak_get_alg +#define Hacl_Streaming_Keccak_hash_len python_hashlib_Hacl_Streaming_Keccak_hash_len +#define Hacl_Streaming_Keccak_is_shake python_hashlib_Hacl_Streaming_Keccak_is_shake +#define Hacl_Streaming_Keccak_malloc python_hashlib_Hacl_Streaming_Keccak_malloc +#define Hacl_Streaming_Keccak_reset python_hashlib_Hacl_Streaming_Keccak_reset +#define Hacl_Streaming_Keccak_update python_hashlib_Hacl_Streaming_Keccak_update #endif // _PYTHON_HACL_NAMESPACES_H diff --git a/Modules/_hacl/refresh.sh b/Modules/_hacl/refresh.sh index 76b92ec4599102..220ebbe5561341 100755 --- a/Modules/_hacl/refresh.sh +++ b/Modules/_hacl/refresh.sh @@ -22,7 +22,7 @@ fi # Update this when updating to a new version after verifying that the changes # the update brings in are good. -expected_hacl_star_rev=13e0c6721ac9206c4249ecc1dc04ed617ad1e262 +expected_hacl_star_rev=363eae2c2eb60e46f182ddd4bd1cd3f1d00b35c9 hacl_dir="$(realpath "$1")" cd "$(dirname "$0")" @@ -45,11 +45,14 @@ dist_files=( Hacl_Hash_SHA1.h internal/Hacl_Hash_SHA1.h Hacl_Hash_MD5.h + Hacl_Hash_SHA3.h internal/Hacl_Hash_MD5.h + internal/Hacl_Hash_SHA3.h internal/Hacl_SHA2_Generic.h Hacl_Streaming_SHA2.c Hacl_Hash_SHA1.c Hacl_Hash_MD5.c + Hacl_Hash_SHA3.c ) declare -a include_files @@ -134,9 +137,9 @@ $sed -i -z 's!#include \n!#include \n#include "python_hacl_n # Finally, we remove a bunch of ifdefs from target.h that are, again, useful in # the general case, but not exercised by the subset of HACL* that we vendor. -$sed -z -i 's!#ifndef KRML_\(HOST_PRINTF\|HOST_EXIT\|PRE_ALIGN\|POST_ALIGN\|ALIGNED_MALLOC\|ALIGNED_FREE\|HOST_TIME\)\n\(\n\|# [^\n]*\n\|[^#][^\n]*\n\)*#endif\n\n!!g' include/krml/internal/target.h -$sed -z -i 's!\n\n\([^#][^\n]*\n\)*#define KRML_\(EABORT\|EXIT\|CHECK_SIZE\)[^\n]*\(\n [^\n]*\)*!!g' include/krml/internal/target.h +$sed -z -i 's!#ifndef KRML_\(PRE_ALIGN\|POST_ALIGN\|ALIGNED_MALLOC\|ALIGNED_FREE\|HOST_TIME\)\n\(\n\|# [^\n]*\n\|[^#][^\n]*\n\)*#endif\n\n!!g' include/krml/internal/target.h +$sed -z -i 's!\n\n\([^#][^\n]*\n\)*#define KRML_\(EABORT\|EXIT\)[^\n]*\(\n [^\n]*\)*!!g' include/krml/internal/target.h $sed -z -i 's!\n\n\([^#][^\n]*\n\)*#if [^\n]*\n\( [^\n]*\n\)*#define KRML_\(EABORT\|EXIT\|CHECK_SIZE\)[^\n]*\(\n [^\n]*\)*!!g' include/krml/internal/target.h -$sed -z -i 's!\n\n\([^#][^\n]*\n\)*#if [^\n]*\n\( [^\n]*\n\)*# define _\?KRML_\(DEPRECATED\|CHECK_SIZE_PRAGMA\|HOST_EPRINTF\|HOST_SNPRINTF\)[^\n]*\n\([^#][^\n]*\n\|#el[^\n]*\n\|# [^\n]*\n\)*#endif!!g' include/krml/internal/target.h +$sed -z -i 's!\n\n\([^#][^\n]*\n\)*#if [^\n]*\n\( [^\n]*\n\)*# define _\?KRML_\(DEPRECATED\|HOST_SNPRINTF\)[^\n]*\n\([^#][^\n]*\n\|#el[^\n]*\n\|# [^\n]*\n\)*#endif!!g' include/krml/internal/target.h echo "Updated; verify all is okay using git diff and git status." 
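A minimal usage sketch of the streaming Keccak API declared in Hacl_Hash_SHA3.h above. The wrapper function, buffer names, and hex printing are illustrative only; the Hacl_Streaming_Keccak_* calls, the Spec_Hash_Definitions_SHA3_256 constant, and the 32-byte SHA3-256 digest size come from the vendored code.

#include <stdint.h>
#include <stdio.h>
#include "Hacl_Hash_SHA3.h"

/* Hash a message with SHA3-256 through the streaming interface:
 * malloc -> update (any number of times) -> finish -> free. */
static void
sha3_256_streaming_demo(uint8_t *msg, uint32_t len)
{
    uint8_t digest[32];   /* hash_len() reports 32 bytes for SHA3-256 */

    Hacl_Streaming_Keccak_state *st =
        Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_256);

    /* update() returns nonzero only when the accumulated input length
     * would overflow the state's internal length counter. */
    (void) Hacl_Streaming_Keccak_update(st, msg, len);

    /* finish() is for the fixed-output algorithms; SHAKE callers use
     * Hacl_Streaming_Keccak_squeeze() with an explicit output length. */
    if (Hacl_Streaming_Keccak_finish(st, digest) == Hacl_Streaming_Keccak_Success) {
        for (int i = 0; i < 32; i++) {
            printf("%02x", digest[i]);
        }
        printf("\n");
    }

    Hacl_Streaming_Keccak_free(st);
}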
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 7476e5dc7dd61e..99d0b72819137e 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -2260,6 +2260,7 @@ static PyModuleDef_Slot hashlib_slots[] = { {Py_mod_exec, hashlib_md_meth_names}, {Py_mod_exec, hashlib_init_constructors}, {Py_mod_exec, hashlib_exception}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_heapqmodule.c b/Modules/_heapqmodule.c index 07ddc7b0851241..00285ae01f8574 100644 --- a/Modules/_heapqmodule.c +++ b/Modules/_heapqmodule.c @@ -682,6 +682,7 @@ heapq_exec(PyObject *m) static struct PyModuleDef_Slot heapq_slots[] = { {Py_mod_exec, heapq_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c index a3bfbc9ac5a1b1..c05407b5d61815 100644 --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -321,7 +321,7 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode, #ifdef HAVE_WINDOWS_CONSOLE_IO const PyConfig *config = _Py_GetConfig(); if (!config->legacy_windows_stdio && _PyIO_get_console_type(path_or_fd) != '\0') { - RawIO_class = (PyObject *)&PyWindowsConsoleIO_Type; + RawIO_class = (PyObject *)state->PyWindowsConsoleIO_Type; encoding = "utf-8"; } #endif @@ -580,17 +580,24 @@ iomodule_traverse(PyObject *mod, visitproc visit, void *arg) { _PyIO_State *state = get_io_state(mod); if (!state->initialized) return 0; - Py_VISIT(state->locale_module); Py_VISIT(state->unsupported_operation); + Py_VISIT(state->PyIncrementalNewlineDecoder_Type); + Py_VISIT(state->PyRawIOBase_Type); + Py_VISIT(state->PyBufferedIOBase_Type); Py_VISIT(state->PyBufferedRWPair_Type); Py_VISIT(state->PyBufferedRandom_Type); Py_VISIT(state->PyBufferedReader_Type); Py_VISIT(state->PyBufferedWriter_Type); + Py_VISIT(state->PyBytesIOBuffer_Type); Py_VISIT(state->PyBytesIO_Type); Py_VISIT(state->PyFileIO_Type); Py_VISIT(state->PyStringIO_Type); + Py_VISIT(state->PyTextIOBase_Type); Py_VISIT(state->PyTextIOWrapper_Type); +#ifdef HAVE_WINDOWS_CONSOLE_IO + Py_VISIT(state->PyWindowsConsoleIO_Type); +#endif return 0; } @@ -600,18 +607,24 @@ iomodule_clear(PyObject *mod) { _PyIO_State *state = get_io_state(mod); if (!state->initialized) return 0; - if (state->locale_module != NULL) - Py_CLEAR(state->locale_module); Py_CLEAR(state->unsupported_operation); + Py_CLEAR(state->PyIncrementalNewlineDecoder_Type); + Py_CLEAR(state->PyRawIOBase_Type); + Py_CLEAR(state->PyBufferedIOBase_Type); Py_CLEAR(state->PyBufferedRWPair_Type); Py_CLEAR(state->PyBufferedRandom_Type); Py_CLEAR(state->PyBufferedReader_Type); Py_CLEAR(state->PyBufferedWriter_Type); + Py_CLEAR(state->PyBytesIOBuffer_Type); Py_CLEAR(state->PyBytesIO_Type); Py_CLEAR(state->PyFileIO_Type); Py_CLEAR(state->PyStringIO_Type); + Py_CLEAR(state->PyTextIOBase_Type); Py_CLEAR(state->PyTextIOWrapper_Type); +#ifdef HAVE_WINDOWS_CONSOLE_IO + Py_CLEAR(state->PyWindowsConsoleIO_Type); +#endif return 0; } @@ -653,34 +666,20 @@ struct PyModuleDef _PyIO_Module = { static PyTypeObject* static_types[] = { // Base classes &PyIOBase_Type, - &PyIncrementalNewlineDecoder_Type, // PyIOBase_Type subclasses &PyBufferedIOBase_Type, &PyRawIOBase_Type, &PyTextIOBase_Type, - - // PyRawIOBase_Type(PyIOBase_Type) subclasses - &_PyBytesIOBuffer_Type, -#ifdef HAVE_WINDOWS_CONSOLE_IO - &PyWindowsConsoleIO_Type, -#endif }; PyStatus _PyIO_InitTypes(PyInterpreterState *interp) { -#ifdef HAVE_WINDOWS_CONSOLE_IO - if 
(_Py_IsMainInterpreter(interp)) { - // Set type base classes - PyWindowsConsoleIO_Type.tp_base = &PyRawIOBase_Type; - } -#endif - for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) { PyTypeObject *type = static_types[i]; - if (_PyStaticType_InitBuiltin(type) < 0) { + if (_PyStaticType_InitBuiltin(interp, type) < 0) { return _PyStatus_ERR("Can't initialize builtin type"); } } @@ -691,15 +690,11 @@ _PyIO_InitTypes(PyInterpreterState *interp) void _PyIO_FiniTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - // Deallocate types in the reverse order to deallocate subclasses before // their base classes. for (Py_ssize_t i=Py_ARRAY_LENGTH(static_types) - 1; i >= 0; i--) { PyTypeObject *type = static_types[i]; - _PyStaticType_Dealloc(type); + _PyStaticType_Dealloc(interp, type); } } @@ -753,24 +748,37 @@ PyInit__io(void) } } + // Base classes + ADD_TYPE(m, state->PyIncrementalNewlineDecoder_Type, &nldecoder_spec, NULL); + ADD_TYPE(m, state->PyBytesIOBuffer_Type, &bytesiobuf_spec, NULL); + + // PyIOBase_Type subclasses + state->PyRawIOBase_Type = (PyTypeObject *)Py_NewRef(&PyRawIOBase_Type); + state->PyBufferedIOBase_Type = (PyTypeObject *)Py_NewRef(&PyBufferedIOBase_Type); + state->PyTextIOBase_Type = (PyTypeObject *)Py_NewRef(&PyTextIOBase_Type); + // PyBufferedIOBase_Type(PyIOBase_Type) subclasses - ADD_TYPE(m, state->PyBytesIO_Type, &bytesio_spec, &PyBufferedIOBase_Type); + ADD_TYPE(m, state->PyBytesIO_Type, &bytesio_spec, state->PyBufferedIOBase_Type); ADD_TYPE(m, state->PyBufferedWriter_Type, &bufferedwriter_spec, - &PyBufferedIOBase_Type); + state->PyBufferedIOBase_Type); ADD_TYPE(m, state->PyBufferedReader_Type, &bufferedreader_spec, - &PyBufferedIOBase_Type); + state->PyBufferedIOBase_Type); ADD_TYPE(m, state->PyBufferedRWPair_Type, &bufferedrwpair_spec, - &PyBufferedIOBase_Type); + state->PyBufferedIOBase_Type); ADD_TYPE(m, state->PyBufferedRandom_Type, &bufferedrandom_spec, - &PyBufferedIOBase_Type); + state->PyBufferedIOBase_Type); // PyRawIOBase_Type(PyIOBase_Type) subclasses - ADD_TYPE(m, state->PyFileIO_Type, &fileio_spec, &PyRawIOBase_Type); + ADD_TYPE(m, state->PyFileIO_Type, &fileio_spec, state->PyRawIOBase_Type); +#ifdef MS_WINDOWS + ADD_TYPE(m, state->PyWindowsConsoleIO_Type, &winconsoleio_spec, + state->PyRawIOBase_Type); +#endif // PyTextIOBase_Type(PyIOBase_Type) subclasses - ADD_TYPE(m, state->PyStringIO_Type, &stringio_spec, &PyTextIOBase_Type); + ADD_TYPE(m, state->PyStringIO_Type, &stringio_spec, state->PyTextIOBase_Type); ADD_TYPE(m, state->PyTextIOWrapper_Type, &textiowrapper_spec, - &PyTextIOBase_Type); + state->PyTextIOBase_Type); state->initialized = 1; diff --git a/Modules/_io/_iomodule.h b/Modules/_io/_iomodule.h index d7224e56f9a722..1bf301c9cf0a94 100644 --- a/Modules/_io/_iomodule.h +++ b/Modules/_io/_iomodule.h @@ -5,6 +5,7 @@ #include "exports.h" #include "pycore_moduleobject.h" // _PyModule_GetState() +#include "pycore_typeobject.h" // _PyType_GetModuleState() #include "structmember.h" /* ABCs */ @@ -13,22 +14,21 @@ extern PyTypeObject PyRawIOBase_Type; extern PyTypeObject PyBufferedIOBase_Type; extern PyTypeObject PyTextIOBase_Type; -/* Concrete classes */ -extern PyTypeObject PyIncrementalNewlineDecoder_Type; - /* Type specs */ extern PyType_Spec bufferedrandom_spec; extern PyType_Spec bufferedreader_spec; extern PyType_Spec bufferedrwpair_spec; extern PyType_Spec bufferedwriter_spec; extern PyType_Spec bytesio_spec; +extern PyType_Spec bytesiobuf_spec; extern PyType_Spec fileio_spec; +extern PyType_Spec 
nldecoder_spec; extern PyType_Spec stringio_spec; extern PyType_Spec textiowrapper_spec; #ifdef HAVE_WINDOWS_CONSOLE_IO -extern PyTypeObject PyWindowsConsoleIO_Type; -#endif /* HAVE_WINDOWS_CONSOLE_IO */ +extern PyType_Spec winconsoleio_spec; +#endif /* These functions are used as METH_NOARGS methods, are normally called * with args=NULL, and return a new reference. @@ -142,19 +142,25 @@ extern PyModuleDef _PyIO_Module; typedef struct { int initialized; - PyObject *locale_module; - PyObject *unsupported_operation; /* Types */ + PyTypeObject *PyIncrementalNewlineDecoder_Type; + PyTypeObject *PyRawIOBase_Type; + PyTypeObject *PyBufferedIOBase_Type; PyTypeObject *PyBufferedRWPair_Type; PyTypeObject *PyBufferedRandom_Type; PyTypeObject *PyBufferedReader_Type; PyTypeObject *PyBufferedWriter_Type; + PyTypeObject *PyBytesIOBuffer_Type; PyTypeObject *PyBytesIO_Type; PyTypeObject *PyFileIO_Type; PyTypeObject *PyStringIO_Type; + PyTypeObject *PyTextIOBase_Type; PyTypeObject *PyTextIOWrapper_Type; +#ifdef MS_WINDOWS + PyTypeObject *PyWindowsConsoleIO_Type; +#endif } _PyIO_State; #define IO_MOD_STATE(mod) ((_PyIO_State *)PyModule_GetState(mod)) @@ -168,6 +174,14 @@ get_io_state(PyObject *module) return (_PyIO_State *)state; } +static inline _PyIO_State * +get_io_state_by_cls(PyTypeObject *cls) +{ + void *state = _PyType_GetModuleState(cls); + assert(state != NULL); + return (_PyIO_State *)state; +} + static inline _PyIO_State * find_io_state_by_def(PyTypeObject *type) { @@ -181,5 +195,3 @@ extern _PyIO_State *_PyIO_get_module_state(void); #ifdef HAVE_WINDOWS_CONSOLE_IO extern char _PyIO_get_console_type(PyObject *); #endif - -extern Py_EXPORTED_SYMBOL PyTypeObject _PyBytesIOBuffer_Type; diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c index 2c71768be97870..723d16b47fef9f 100644 --- a/Modules/_io/bufferedio.c +++ b/Modules/_io/bufferedio.c @@ -2231,7 +2231,7 @@ bufferedrwpair_close(rwpair *self, PyObject *Py_UNUSED(ignored)) } else { Py_DECREF(ret); - } + } ret = _forward_call(self->reader, &_Py_ID(close), NULL); if (exc != NULL) { _PyErr_ChainExceptions1(exc); diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c index 7e9d28b3b9655c..3fddfc2ed0bc9c 100644 --- a/Modules/_io/bytesio.c +++ b/Modules/_io/bytesio.c @@ -308,14 +308,18 @@ _io_BytesIO_flush_impl(bytesio *self) /*[clinic input] _io.BytesIO.getbuffer + cls: defining_class + / + Get a read-write view over the contents of the BytesIO object. 
[clinic start generated code]*/ static PyObject * -_io_BytesIO_getbuffer_impl(bytesio *self) -/*[clinic end generated code: output=72cd7c6e13aa09ed input=8f738ef615865176]*/ +_io_BytesIO_getbuffer_impl(bytesio *self, PyTypeObject *cls) +/*[clinic end generated code: output=045091d7ce87fe4e input=0668fbb48f95dffa]*/ { - PyTypeObject *type = &_PyBytesIOBuffer_Type; + _PyIO_State *state = get_io_state_by_cls(cls); + PyTypeObject *type = state->PyBytesIOBuffer_Type; bytesiobuf *buf; PyObject *view; @@ -1086,9 +1090,17 @@ bytesiobuf_releasebuffer(bytesiobuf *obj, Py_buffer *view) b->exports--; } +static int +bytesiobuf_clear(bytesiobuf *self) +{ + Py_CLEAR(self->source); + return 0; +} + static int bytesiobuf_traverse(bytesiobuf *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->source); return 0; } @@ -1096,54 +1108,29 @@ bytesiobuf_traverse(bytesiobuf *self, visitproc visit, void *arg) static void bytesiobuf_dealloc(bytesiobuf *self) { + PyTypeObject *tp = Py_TYPE(self); /* bpo-31095: UnTrack is needed before calling any callbacks */ PyObject_GC_UnTrack(self); - Py_CLEAR(self->source); - Py_TYPE(self)->tp_free(self); + (void)bytesiobuf_clear(self); + tp->tp_free(self); + Py_DECREF(tp); } -static PyBufferProcs bytesiobuf_as_buffer = { - (getbufferproc) bytesiobuf_getbuffer, - (releasebufferproc) bytesiobuf_releasebuffer, +static PyType_Slot bytesiobuf_slots[] = { + {Py_tp_dealloc, bytesiobuf_dealloc}, + {Py_tp_traverse, bytesiobuf_traverse}, + {Py_tp_clear, bytesiobuf_clear}, + + // Buffer protocol + {Py_bf_getbuffer, bytesiobuf_getbuffer}, + {Py_bf_releasebuffer, bytesiobuf_releasebuffer}, + {0, NULL}, }; -Py_EXPORTED_SYMBOL PyTypeObject _PyBytesIOBuffer_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_io._BytesIOBuffer", /*tp_name*/ - sizeof(bytesiobuf), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - (destructor)bytesiobuf_dealloc, /*tp_dealloc*/ - 0, /*tp_vectorcall_offset*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_as_async*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - &bytesiobuf_as_buffer, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - (traverseproc)bytesiobuf_traverse, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - 0, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - 0, /*tp_init*/ - 0, /*tp_alloc*/ - 0, /*tp_new*/ +PyType_Spec bytesiobuf_spec = { + .name = "_io._BytesIOBuffer", + .basicsize = sizeof(bytesiobuf), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = bytesiobuf_slots, }; diff --git a/Modules/_io/clinic/bytesio.c.h b/Modules/_io/clinic/bytesio.c.h index 84b58db6c7a702..9550c8728c251e 100644 --- a/Modules/_io/clinic/bytesio.c.h +++ b/Modules/_io/clinic/bytesio.c.h @@ -87,15 +87,19 @@ PyDoc_STRVAR(_io_BytesIO_getbuffer__doc__, "Get a read-write view over the contents of the BytesIO object."); #define _IO_BYTESIO_GETBUFFER_METHODDEF \ - {"getbuffer", (PyCFunction)_io_BytesIO_getbuffer, METH_NOARGS, _io_BytesIO_getbuffer__doc__}, + {"getbuffer", _PyCFunction_CAST(_io_BytesIO_getbuffer), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _io_BytesIO_getbuffer__doc__}, static PyObject * -_io_BytesIO_getbuffer_impl(bytesio *self); 
+_io_BytesIO_getbuffer_impl(bytesio *self, PyTypeObject *cls); static PyObject * -_io_BytesIO_getbuffer(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_getbuffer(bytesio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _io_BytesIO_getbuffer_impl(self); + if (nargs) { + PyErr_SetString(PyExc_TypeError, "getbuffer() takes no arguments"); + return NULL; + } + return _io_BytesIO_getbuffer_impl(self, cls); } PyDoc_STRVAR(_io_BytesIO_getvalue__doc__, @@ -534,4 +538,4 @@ _io_BytesIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=a44770efbaeb80dd input=a9049054013a1b77]*/ +/*[clinic end generated code: output=098584d485420b65 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/fileio.c.h b/Modules/_io/clinic/fileio.c.h index b6e9bd5b65a029..dfad8a58c4723e 100644 --- a/Modules/_io/clinic/fileio.c.h +++ b/Modules/_io/clinic/fileio.c.h @@ -18,15 +18,19 @@ PyDoc_STRVAR(_io_FileIO_close__doc__, "called more than once without error."); #define _IO_FILEIO_CLOSE_METHODDEF \ - {"close", (PyCFunction)_io_FileIO_close, METH_NOARGS, _io_FileIO_close__doc__}, + {"close", _PyCFunction_CAST(_io_FileIO_close), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _io_FileIO_close__doc__}, static PyObject * -_io_FileIO_close_impl(fileio *self); +_io_FileIO_close_impl(fileio *self, PyTypeObject *cls); static PyObject * -_io_FileIO_close(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_close(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _io_FileIO_close_impl(self); + if (nargs) { + PyErr_SetString(PyExc_TypeError, "close() takes no arguments"); + return NULL; + } + return _io_FileIO_close_impl(self, cls); } PyDoc_STRVAR(_io_FileIO___init____doc__, @@ -466,4 +470,4 @@ _io_FileIO_isatty(fileio *self, PyObject *Py_UNUSED(ignored)) #ifndef _IO_FILEIO_TRUNCATE_METHODDEF #define _IO_FILEIO_TRUNCATE_METHODDEF #endif /* !defined(_IO_FILEIO_TRUNCATE_METHODDEF) */ -/*[clinic end generated code: output=27f883807a6c29ae input=a9049054013a1b77]*/ +/*[clinic end generated code: output=29ed2ae6c451c139 input=a9049054013a1b77]*/ diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index 1118d86e6c9a10..cc0e7307b9da77 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -130,6 +130,9 @@ internal_close(fileio *self) /*[clinic input] _io.FileIO.close + cls: defining_class + / + Close the file. A closed file cannot be used for further I/O operations. close() may be @@ -137,18 +140,20 @@ called more than once without error. 
[clinic start generated code]*/ static PyObject * -_io_FileIO_close_impl(fileio *self) -/*[clinic end generated code: output=7737a319ef3bad0b input=f35231760d54a522]*/ +_io_FileIO_close_impl(fileio *self, PyTypeObject *cls) +/*[clinic end generated code: output=c30cbe9d1f23ca58 input=70da49e63db7c64d]*/ { PyObject *res; - PyObject *exc; int rc; - res = PyObject_CallMethodOneArg((PyObject*)&PyRawIOBase_Type, + _PyIO_State *state = get_io_state_by_cls(cls); + res = PyObject_CallMethodOneArg((PyObject*)state->PyRawIOBase_Type, &_Py_ID(close), (PyObject *)self); if (!self->closefd) { self->fd = -1; return res; } + + PyObject *exc; if (res == NULL) { exc = PyErr_GetRaisedException(); } diff --git a/Modules/_io/stringio.c b/Modules/_io/stringio.c index 54c050f0be4688..13d3b870b39a81 100644 --- a/Modules/_io/stringio.c +++ b/Modules/_io/stringio.c @@ -716,9 +716,10 @@ _io_StringIO___init___impl(stringio *self, PyObject *value, self->writenl = Py_NewRef(self->readnl); } + _PyIO_State *module_state = find_io_state_by_def(Py_TYPE(self)); if (self->readuniversal) { self->decoder = PyObject_CallFunctionObjArgs( - (PyObject *)&PyIncrementalNewlineDecoder_Type, + (PyObject *)module_state->PyIncrementalNewlineDecoder_Type, Py_None, self->readtranslate ? Py_True : Py_False, NULL); if (self->decoder == NULL) return -1; @@ -750,7 +751,7 @@ _io_StringIO___init___impl(stringio *self, PyObject *value, self->state = STATE_ACCUMULATING; } self->pos = 0; - self->module_state = find_io_state_by_def(Py_TYPE(self)); + self->module_state = module_state; self->closed = 0; self->ok = 1; return 0; diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index 3ff84cb623af74..070687a83d1bc2 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -18,10 +18,10 @@ /*[clinic input] module _io -class _io.IncrementalNewlineDecoder "nldecoder_object *" "&PyIncrementalNewlineDecoder_Type" +class _io.IncrementalNewlineDecoder "nldecoder_object *" "clinic_state()->PyIncrementalNewlineDecoder_Type" class _io.TextIOWrapper "textio *" "clinic_state()->TextIOWrapper_Type" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=d3f032e90f74c8f2]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=81f67cf54eaa6001]*/ /* TextIOBase */ @@ -248,12 +248,32 @@ _io_IncrementalNewlineDecoder___init___impl(nldecoder_object *self, return 0; } -static void -incrementalnewlinedecoder_dealloc(nldecoder_object *self) +static int +incrementalnewlinedecoder_traverse(nldecoder_object *self, visitproc visit, + void *arg) +{ + Py_VISIT(Py_TYPE(self)); + Py_VISIT(self->decoder); + Py_VISIT(self->errors); + return 0; +} + +static int +incrementalnewlinedecoder_clear(nldecoder_object *self) { Py_CLEAR(self->decoder); Py_CLEAR(self->errors); - Py_TYPE(self)->tp_free((PyObject *)self); + return 0; +} + +static void +incrementalnewlinedecoder_dealloc(nldecoder_object *self) +{ + PyTypeObject *tp = Py_TYPE(self); + _PyObject_GC_UNTRACK(self); + (void)incrementalnewlinedecoder_clear(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); } static int @@ -872,8 +892,9 @@ _textiowrapper_set_decoder(textio *self, PyObject *codec_info, return -1; if (self->readuniversal) { + _PyIO_State *state = self->state; PyObject *incrementalDecoder = PyObject_CallFunctionObjArgs( - (PyObject *)&PyIncrementalNewlineDecoder_Type, + (PyObject *)state->PyIncrementalNewlineDecoder_Type, self->decoder, self->readtranslate ? 
Py_True : Py_False, NULL); if (incrementalDecoder == NULL) return -1; @@ -884,11 +905,12 @@ _textiowrapper_set_decoder(textio *self, PyObject *codec_info, } static PyObject* -_textiowrapper_decode(PyObject *decoder, PyObject *bytes, int eof) +_textiowrapper_decode(_PyIO_State *state, PyObject *decoder, PyObject *bytes, + int eof) { PyObject *chars; - if (Py_IS_TYPE(decoder, &PyIncrementalNewlineDecoder_Type)) + if (Py_IS_TYPE(decoder, state->PyIncrementalNewlineDecoder_Type)) chars = _PyIncrementalNewlineDecoder_decode(decoder, bytes, eof); else chars = PyObject_CallMethodObjArgs(decoder, &_Py_ID(decode), bytes, @@ -1167,6 +1189,8 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, self->buffer = Py_NewRef(buffer); /* Build the decoder object */ + _PyIO_State *state = find_io_state_by_def(Py_TYPE(self)); + self->state = state; if (_textiowrapper_set_decoder(self, codec_info, PyUnicode_AsUTF8(errors)) != 0) goto error; @@ -1177,7 +1201,6 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, /* Finished sorting out the codec details */ Py_CLEAR(codec_info); - _PyIO_State *state = find_io_state_by_def(Py_TYPE(self)); if (Py_IS_TYPE(buffer, state->PyBufferedReader_Type) || Py_IS_TYPE(buffer, state->PyBufferedWriter_Type) || Py_IS_TYPE(buffer, state->PyBufferedRandom_Type)) @@ -1214,7 +1237,6 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, goto error; } - self->state = state; self->ok = 1; return 0; @@ -1843,7 +1865,8 @@ textiowrapper_read_chunk(textio *self, Py_ssize_t size_hint) nbytes = input_chunk_buf.len; eof = (nbytes == 0); - decoded_chars = _textiowrapper_decode(self->decoder, input_chunk, eof); + decoded_chars = _textiowrapper_decode(self->state, self->decoder, + input_chunk, eof); PyBuffer_Release(&input_chunk_buf); if (decoded_chars == NULL) goto fail; @@ -1913,7 +1936,8 @@ _io_TextIOWrapper_read_impl(textio *self, Py_ssize_t n) if (bytes == NULL) goto fail; - if (Py_IS_TYPE(self->decoder, &PyIncrementalNewlineDecoder_Type)) + _PyIO_State *state = self->state; + if (Py_IS_TYPE(self->decoder, state->PyIncrementalNewlineDecoder_Type)) decoded = _PyIncrementalNewlineDecoder_decode(self->decoder, bytes, 1); else @@ -3172,45 +3196,23 @@ static PyGetSetDef incrementalnewlinedecoder_getset[] = { {NULL} }; -PyTypeObject PyIncrementalNewlineDecoder_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_io.IncrementalNewlineDecoder", /*tp_name*/ - sizeof(nldecoder_object), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - (destructor)incrementalnewlinedecoder_dealloc, /*tp_dealloc*/ - 0, /*tp_vectorcall_offset*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_as_async*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash */ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ - _io_IncrementalNewlineDecoder___init____doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /*tp_weaklistoffset*/ - 0, /* tp_iter */ - 0, /* tp_iternext */ - incrementalnewlinedecoder_methods, /* tp_methods */ - 0, /* tp_members */ - incrementalnewlinedecoder_getset, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - _io_IncrementalNewlineDecoder___init__, /* tp_init */ - 0, /* tp_alloc */ - PyType_GenericNew, /* tp_new */ +static PyType_Slot nldecoder_slots[] = { + {Py_tp_dealloc, incrementalnewlinedecoder_dealloc}, + 
{Py_tp_doc, (void *)_io_IncrementalNewlineDecoder___init____doc__}, + {Py_tp_methods, incrementalnewlinedecoder_methods}, + {Py_tp_getset, incrementalnewlinedecoder_getset}, + {Py_tp_traverse, incrementalnewlinedecoder_traverse}, + {Py_tp_clear, incrementalnewlinedecoder_clear}, + {Py_tp_init, _io_IncrementalNewlineDecoder___init__}, + {0, NULL}, +}; + +PyType_Spec nldecoder_spec = { + .name = "_io.IncrementalNewlineDecoder", + .basicsize = sizeof(nldecoder_object), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = nldecoder_slots, }; diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c index f836e230243020..fdb57cff7c04d6 100644 --- a/Modules/_io/winconsoleio.c +++ b/Modules/_io/winconsoleio.c @@ -137,9 +137,9 @@ char _PyIO_get_console_type(PyObject *path_or_fd) { /*[clinic input] module _io -class _io._WindowsConsoleIO "winconsoleio *" "&PyWindowsConsoleIO_Type" +class _io._WindowsConsoleIO "winconsoleio *" "clinic_state()->PyWindowsConsoleIO_Type" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=e897fdc1fba4e131]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=05526e723011ab36]*/ typedef struct { PyObject_HEAD @@ -156,8 +156,6 @@ typedef struct { wchar_t wbuf; } winconsoleio; -PyTypeObject PyWindowsConsoleIO_Type; - int _PyWindowsConsoleIO_closed(PyObject *self) { @@ -265,7 +263,10 @@ _io__WindowsConsoleIO___init___impl(winconsoleio *self, PyObject *nameobj, int fd_is_own = 0; HANDLE handle = NULL; - assert(PyObject_TypeCheck(self, (PyTypeObject *)&PyWindowsConsoleIO_Type)); +#ifdef Py_DEBUG + _PyIO_State *state = find_io_state_by_def(Py_TYPE(self)); + assert(PyObject_TypeCheck(self, state->PyWindowsConsoleIO_Type)); +#endif if (self->fd >= 0) { if (self->closefd) { /* Have to close the existing file first. 
*/ @@ -417,6 +418,7 @@ _io__WindowsConsoleIO___init___impl(winconsoleio *self, PyObject *nameobj, static int winconsoleio_traverse(winconsoleio *self, visitproc visit, void *arg) { + Py_VISIT(Py_TYPE(self)); Py_VISIT(self->dict); return 0; } @@ -431,6 +433,7 @@ winconsoleio_clear(winconsoleio *self) static void winconsoleio_dealloc(winconsoleio *self) { + PyTypeObject *tp = Py_TYPE(self); self->finalizing = 1; if (_PyIOBase_finalize((PyObject *) self) < 0) return; @@ -438,7 +441,8 @@ winconsoleio_dealloc(winconsoleio *self) if (self->weakreflist != NULL) PyObject_ClearWeakRefs((PyObject *) self); Py_CLEAR(self->dict); - Py_TYPE(self)->tp_free((PyObject *)self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); } static PyObject * @@ -1078,7 +1082,9 @@ _io__WindowsConsoleIO_isatty_impl(winconsoleio *self) Py_RETURN_TRUE; } +#define clinic_state() (IO_STATE()) #include "clinic/winconsoleio.c.h" +#undef clinic_state static PyMethodDef winconsoleio_methods[] = { _IO__WINDOWSCONSOLEIO_READ_METHODDEF @@ -1124,59 +1130,32 @@ static PyGetSetDef winconsoleio_getsetlist[] = { static PyMemberDef winconsoleio_members[] = { {"_blksize", T_UINT, offsetof(winconsoleio, blksize), 0}, {"_finalizing", T_BOOL, offsetof(winconsoleio, finalizing), 0}, + {"__weaklistoffset__", T_PYSSIZET, offsetof(winconsoleio, weakreflist), READONLY}, + {"__dictoffset__", T_PYSSIZET, offsetof(winconsoleio, dict), READONLY}, {NULL} }; -PyTypeObject PyWindowsConsoleIO_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_io._WindowsConsoleIO", - sizeof(winconsoleio), - 0, - (destructor)winconsoleio_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)winconsoleio_repr, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE - | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - _io__WindowsConsoleIO___init____doc__, /* tp_doc */ - (traverseproc)winconsoleio_traverse, /* tp_traverse */ - (inquiry)winconsoleio_clear, /* tp_clear */ - 0, /* tp_richcompare */ - offsetof(winconsoleio, weakreflist), /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - winconsoleio_methods, /* tp_methods */ - winconsoleio_members, /* tp_members */ - winconsoleio_getsetlist, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - offsetof(winconsoleio, dict), /* tp_dictoffset */ - _io__WindowsConsoleIO___init__, /* tp_init */ - PyType_GenericAlloc, /* tp_alloc */ - winconsoleio_new, /* tp_new */ - PyObject_GC_Del, /* tp_free */ - 0, /* tp_is_gc */ - 0, /* tp_bases */ - 0, /* tp_mro */ - 0, /* tp_cache */ - 0, /* tp_subclasses */ - 0, /* tp_weaklist */ - 0, /* tp_del */ - 0, /* tp_version_tag */ - 0, /* tp_finalize */ +static PyType_Slot winconsoleio_slots[] = { + {Py_tp_dealloc, winconsoleio_dealloc}, + {Py_tp_repr, winconsoleio_repr}, + {Py_tp_getattro, PyObject_GenericGetAttr}, + {Py_tp_doc, (void *)_io__WindowsConsoleIO___init____doc__}, + {Py_tp_traverse, winconsoleio_traverse}, + {Py_tp_clear, winconsoleio_clear}, + {Py_tp_methods, winconsoleio_methods}, + {Py_tp_members, winconsoleio_members}, + {Py_tp_getset, winconsoleio_getsetlist}, + {Py_tp_init, _io__WindowsConsoleIO___init__}, + {Py_tp_new, winconsoleio_new}, + {0, NULL}, +}; + +PyType_Spec winconsoleio_spec = { + .name = "_io._WindowsConsoleIO", + 
.basicsize = sizeof(winconsoleio), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = winconsoleio_slots, }; #endif /* HAVE_WINDOWS_CONSOLE_IO */ diff --git a/Modules/_json.c b/Modules/_json.c index fa8e2a936d2c33..c90de05b046b00 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -1801,6 +1801,7 @@ _json_exec(PyObject *module) static PyModuleDef_Slot _json_slots[] = { {Py_mod_exec, _json_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c index 96675cdfb661ad..1ada7305117bb7 100644 --- a/Modules/_localemodule.c +++ b/Modules/_localemodule.c @@ -874,6 +874,7 @@ _locale_exec(PyObject *module) static struct PyModuleDef_Slot _locale_slots[] = { {Py_mod_exec, _locale_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c index 83d034ae7eed78..1c84f66ee6f579 100644 --- a/Modules/_lsprof.c +++ b/Modules/_lsprof.c @@ -49,6 +49,8 @@ typedef struct { int flags; PyObject *externalTimer; double externalTimerUnit; + int tool_id; + PyObject* missing; } ProfilerObject; #define POF_ENABLED 0x001 @@ -399,64 +401,6 @@ ptrace_leave_call(PyObject *self, void *key) pObj->freelistProfilerContext = pContext; } -static int -profiler_callback(PyObject *self, PyFrameObject *frame, int what, - PyObject *arg) -{ - switch (what) { - - /* the 'frame' of a called function is about to start its execution */ - case PyTrace_CALL: - { - PyCodeObject *code = PyFrame_GetCode(frame); - ptrace_enter_call(self, (void *)code, (PyObject *)code); - Py_DECREF(code); - break; - } - - /* the 'frame' of a called function is about to finish - (either normally or with an exception) */ - case PyTrace_RETURN: - { - PyCodeObject *code = PyFrame_GetCode(frame); - ptrace_leave_call(self, (void *)code); - Py_DECREF(code); - break; - } - - /* case PyTrace_EXCEPTION: - If the exception results in the function exiting, a - PyTrace_RETURN event will be generated, so we don't need to - handle it. 
*/ - - /* the Python function 'frame' is issuing a call to the built-in - function 'arg' */ - case PyTrace_C_CALL: - if ((((ProfilerObject *)self)->flags & POF_BUILTINS) - && PyCFunction_Check(arg)) { - ptrace_enter_call(self, - ((PyCFunctionObject *)arg)->m_ml, - arg); - } - break; - - /* the call to the built-in function 'arg' is returning into its - caller 'frame' */ - case PyTrace_C_RETURN: /* ...normally */ - case PyTrace_C_EXCEPTION: /* ...with an exception set */ - if ((((ProfilerObject *)self)->flags & POF_BUILTINS) - && PyCFunction_Check(arg)) { - ptrace_leave_call(self, - ((PyCFunctionObject *)arg)->m_ml); - } - break; - - default: - break; - } - return 0; -} - static int pending_exception(ProfilerObject *pObj) { @@ -650,6 +594,99 @@ setBuiltins(ProfilerObject *pObj, int nvalue) return 0; } +PyObject* pystart_callback(ProfilerObject* self, PyObject *const *args, Py_ssize_t size) +{ + PyObject* code = args[0]; + ptrace_enter_call((PyObject*)self, (void *)code, (PyObject *)code); + + Py_RETURN_NONE; +} + +PyObject* pyreturn_callback(ProfilerObject* self, PyObject *const *args, Py_ssize_t size) +{ + PyObject* code = args[0]; + ptrace_leave_call((PyObject*)self, (void *)code); + + Py_RETURN_NONE; +} + +PyObject* get_cfunc_from_callable(PyObject* callable, PyObject* self_arg, PyObject* missing) +{ + // return a new reference + if (PyCFunction_Check(callable)) { + Py_INCREF(callable); + return (PyObject*)((PyCFunctionObject *)callable); + } + if (Py_TYPE(callable) == &PyMethodDescr_Type) { + /* For backwards compatibility need to + * convert to builtin method */ + + /* If no arg, skip */ + if (self_arg == missing) { + return NULL; + } + PyObject *meth = Py_TYPE(callable)->tp_descr_get( + callable, self_arg, (PyObject*)Py_TYPE(self_arg)); + if (meth == NULL) { + return NULL; + } + if (PyCFunction_Check(meth)) { + return (PyObject*)((PyCFunctionObject *)meth); + } + } + return NULL; +} + +PyObject* ccall_callback(ProfilerObject* self, PyObject *const *args, Py_ssize_t size) +{ + if (self->flags & POF_BUILTINS) { + PyObject* callable = args[2]; + PyObject* self_arg = args[3]; + + PyObject* cfunc = get_cfunc_from_callable(callable, self_arg, self->missing); + + if (cfunc) { + ptrace_enter_call((PyObject*)self, + ((PyCFunctionObject *)cfunc)->m_ml, + cfunc); + Py_DECREF(cfunc); + } + } + Py_RETURN_NONE; +} + +PyObject* creturn_callback(ProfilerObject* self, PyObject *const *args, Py_ssize_t size) +{ + if (self->flags & POF_BUILTINS) { + PyObject* callable = args[2]; + PyObject* self_arg = args[3]; + + PyObject* cfunc = get_cfunc_from_callable(callable, self_arg, self->missing); + + if (cfunc) { + ptrace_leave_call((PyObject*)self, + ((PyCFunctionObject *)cfunc)->m_ml); + Py_DECREF(cfunc); + } + } + Py_RETURN_NONE; +} + +static const struct { + int event; + const char* callback_method; +} callback_table[] = { + {PY_MONITORING_EVENT_PY_START, "_pystart_callback"}, + {PY_MONITORING_EVENT_PY_RESUME, "_pystart_callback"}, + {PY_MONITORING_EVENT_PY_RETURN, "_pyreturn_callback"}, + {PY_MONITORING_EVENT_PY_YIELD, "_pyreturn_callback"}, + {PY_MONITORING_EVENT_PY_UNWIND, "_pyreturn_callback"}, + {PY_MONITORING_EVENT_CALL, "_ccall_callback"}, + {PY_MONITORING_EVENT_C_RETURN, "_creturn_callback"}, + {PY_MONITORING_EVENT_C_RAISE, "_creturn_callback"}, + {0, NULL} +}; + PyDoc_STRVAR(enable_doc, "\ enable(subcalls=True, builtins=True)\n\ \n\ @@ -666,6 +703,8 @@ profiler_enable(ProfilerObject *self, PyObject *args, PyObject *kwds) int subcalls = -1; int builtins = -1; static char *kwlist[] = {"subcalls", 
"builtins", 0}; + int all_events = 0; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|pp:enable", kwlist, &subcalls, &builtins)) return NULL; @@ -673,11 +712,37 @@ profiler_enable(ProfilerObject *self, PyObject *args, PyObject *kwds) return NULL; } - PyThreadState *tstate = _PyThreadState_GET(); - if (_PyEval_SetProfile(tstate, profiler_callback, (PyObject*)self) < 0) { + PyObject* monitoring = _PyImport_GetModuleAttrString("sys", "monitoring"); + if (!monitoring) { + return NULL; + } + + if (PyObject_CallMethod(monitoring, "use_tool_id", "is", self->tool_id, "cProfile") == NULL) { + PyErr_Format(PyExc_ValueError, "Another profiling tool is already active"); + Py_DECREF(monitoring); + return NULL; + } + + for (int i = 0; callback_table[i].callback_method; i++) { + PyObject* callback = PyObject_GetAttrString((PyObject*)self, callback_table[i].callback_method); + if (!callback) { + Py_DECREF(monitoring); + return NULL; + } + Py_XDECREF(PyObject_CallMethod(monitoring, "register_callback", "iiO", self->tool_id, + (1 << callback_table[i].event), + callback)); + Py_DECREF(callback); + all_events |= (1 << callback_table[i].event); + } + + if (!PyObject_CallMethod(monitoring, "set_events", "ii", self->tool_id, all_events)) { + Py_DECREF(monitoring); return NULL; } + Py_DECREF(monitoring); + self->flags |= POF_ENABLED; Py_RETURN_NONE; } @@ -707,13 +772,44 @@ Stop collecting profiling information.\n\ static PyObject* profiler_disable(ProfilerObject *self, PyObject* noarg) { - PyThreadState *tstate = _PyThreadState_GET(); - if (_PyEval_SetProfile(tstate, NULL, NULL) < 0) { - return NULL; + if (self->flags & POF_ENABLED) { + PyObject* result = NULL; + PyObject* monitoring = _PyImport_GetModuleAttrString("sys", "monitoring"); + + if (!monitoring) { + return NULL; + } + + for (int i = 0; callback_table[i].callback_method; i++) { + result = PyObject_CallMethod(monitoring, "register_callback", "iiO", self->tool_id, + (1 << callback_table[i].event), Py_None); + if (!result) { + Py_DECREF(monitoring); + return NULL; + } + Py_DECREF(result); + } + + result = PyObject_CallMethod(monitoring, "set_events", "ii", self->tool_id, 0); + if (!result) { + Py_DECREF(monitoring); + return NULL; + } + Py_DECREF(result); + + result = PyObject_CallMethod(monitoring, "free_tool_id", "i", self->tool_id); + if (!result) { + Py_DECREF(monitoring); + return NULL; + } + Py_DECREF(result); + + Py_DECREF(monitoring); + + self->flags &= ~POF_ENABLED; + flush_unmatched(self); } - self->flags &= ~POF_ENABLED; - flush_unmatched(self); if (pending_exception(self)) { return NULL; } @@ -778,17 +874,37 @@ profiler_init(ProfilerObject *pObj, PyObject *args, PyObject *kw) return -1; pObj->externalTimerUnit = timeunit; Py_XSETREF(pObj->externalTimer, Py_XNewRef(timer)); + pObj->tool_id = PY_MONITORING_PROFILER_ID; + + PyObject* monitoring = _PyImport_GetModuleAttrString("sys", "monitoring"); + if (!monitoring) { + return -1; + } + pObj->missing = PyObject_GetAttrString(monitoring, "MISSING"); + if (!pObj->missing) { + Py_DECREF(monitoring); + return -1; + } + Py_DECREF(monitoring); return 0; } static PyMethodDef profiler_methods[] = { _LSPROF_PROFILER_GETSTATS_METHODDEF - {"enable", _PyCFunction_CAST(profiler_enable), + {"enable", _PyCFunction_CAST(profiler_enable), METH_VARARGS | METH_KEYWORDS, enable_doc}, - {"disable", (PyCFunction)profiler_disable, + {"disable", (PyCFunction)profiler_disable, METH_NOARGS, disable_doc}, - {"clear", (PyCFunction)profiler_clear, + {"clear", (PyCFunction)profiler_clear, METH_NOARGS, clear_doc}, + 
{"_pystart_callback", _PyCFunction_CAST(pystart_callback), + METH_FASTCALL, NULL}, + {"_pyreturn_callback", _PyCFunction_CAST(pyreturn_callback), + METH_FASTCALL, NULL}, + {"_ccall_callback", _PyCFunction_CAST(ccall_callback), + METH_FASTCALL, NULL}, + {"_creturn_callback", _PyCFunction_CAST(creturn_callback), + METH_FASTCALL, NULL}, {NULL, NULL} }; @@ -885,6 +1001,9 @@ _lsprof_exec(PyObject *module) static PyModuleDef_Slot _lsprofslots[] = { {Py_mod_exec, _lsprof_exec}, + // XXX gh-103092: fix isolation. + {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, + //{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_lzmamodule.c b/Modules/_lzmamodule.c index bccab8639159e7..e34fbad230d51a 100644 --- a/Modules/_lzmamodule.c +++ b/Modules/_lzmamodule.c @@ -1611,6 +1611,7 @@ static PyMethodDef lzma_methods[] = { static PyModuleDef_Slot lzma_slots[] = { {Py_mod_exec, lzma_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_multiprocessing/multiprocessing.c b/Modules/_multiprocessing/multiprocessing.c index 2463e1e1a8bf7e..8f9daa5c3de0cc 100644 --- a/Modules/_multiprocessing/multiprocessing.c +++ b/Modules/_multiprocessing/multiprocessing.c @@ -276,6 +276,7 @@ multiprocessing_exec(PyObject *module) static PyModuleDef_Slot multiprocessing_slots[] = { {Py_mod_exec, multiprocessing_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_multiprocessing/posixshmem.c b/Modules/_multiprocessing/posixshmem.c index d64ded4168228f..88c93fe313785c 100644 --- a/Modules/_multiprocessing/posixshmem.c +++ b/Modules/_multiprocessing/posixshmem.c @@ -110,12 +110,19 @@ static PyMethodDef module_methods[ ] = { }; +static PyModuleDef_Slot module_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {0, NULL} +}; + + static struct PyModuleDef _posixshmemmodule = { PyModuleDef_HEAD_INIT, .m_name = "_posixshmem", .m_doc = "POSIX shared memory module", .m_size = 0, .m_methods = module_methods, + .m_slots = module_slots, }; /* Module init function */ diff --git a/Modules/_opcode.c b/Modules/_opcode.c index 99be977417743e..b70d426fa29bc0 100644 --- a/Modules/_opcode.c +++ b/Modules/_opcode.c @@ -94,12 +94,18 @@ opcode_functions[] = { {NULL, NULL, 0, NULL} }; +static PyModuleDef_Slot module_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {0, NULL} +}; + static struct PyModuleDef opcodemodule = { PyModuleDef_HEAD_INIT, .m_name = "_opcode", .m_doc = "Opcode support module.", .m_size = 0, - .m_methods = opcode_functions + .m_methods = opcode_functions, + .m_slots = module_slots, }; PyMODINIT_FUNC diff --git a/Modules/_operator.c b/Modules/_operator.c index 38335b6995016c..68ccc90562d38d 100644 --- a/Modules/_operator.c +++ b/Modules/_operator.c @@ -1828,6 +1828,7 @@ operator_exec(PyObject *module) static struct PyModuleDef_Slot operator_slots[] = { {Py_mod_exec, operator_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 360c7910f67187..bf7ecae0cc0e50 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -7912,6 +7912,7 @@ _pickle_exec(PyObject *m) static PyModuleDef_Slot pickle_slots[] = { {Py_mod_exec, _pickle_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/_posixsubprocess.c 
b/Modules/_posixsubprocess.c index f5bce8cd7628ad..2bf83db0e228fb 100644 --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -1140,6 +1140,7 @@ static PyMethodDef module_methods[] = { }; static PyModuleDef_Slot _posixsubprocess_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_queuemodule.c b/Modules/_queuemodule.c index af19dd6c198b67..d36a911a57c02c 100644 --- a/Modules/_queuemodule.c +++ b/Modules/_queuemodule.c @@ -431,6 +431,7 @@ queuemodule_exec(PyObject *module) static PyModuleDef_Slot queuemodule_slots[] = { {Py_mod_exec, queuemodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_randommodule.c b/Modules/_randommodule.c index 6e22053239305a..fda5ef267fb470 100644 --- a/Modules/_randommodule.c +++ b/Modules/_randommodule.c @@ -624,6 +624,7 @@ _random_exec(PyObject *module) static PyModuleDef_Slot _random_slots[] = { {Py_mod_exec, _random_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_scproxy.c b/Modules/_scproxy.c index 344b66f9aad522..0df0324df55f7d 100644 --- a/Modules/_scproxy.c +++ b/Modules/_scproxy.c @@ -206,6 +206,11 @@ get_proxies(PyObject* Py_UNUSED(mod), PyObject *Py_UNUSED(ignored)) kSCPropNetProxiesGopherProxy, kSCPropNetProxiesGopherPort); if (r == -1) goto error; + r = set_proxy(result, "socks", proxyDict, + kSCPropNetProxiesSOCKSEnable, + kSCPropNetProxiesSOCKSProxy, + kSCPropNetProxiesSOCKSPort); + if (r == -1) goto error; CFRelease(proxyDict); return result; @@ -232,6 +237,7 @@ static PyMethodDef mod_methods[] = { }; static PyModuleDef_Slot _scproxy_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_sha3/LICENSE b/Modules/_sha3/LICENSE deleted file mode 100644 index d2d484d8820dcf..00000000000000 --- a/Modules/_sha3/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Markku-Juhani O. Saarinen - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/Modules/_sha3/README.txt b/Modules/_sha3/README.txt deleted file mode 100644 index b35919b01677d3..00000000000000 --- a/Modules/_sha3/README.txt +++ /dev/null @@ -1,8 +0,0 @@ -tiny_sha3 -========= - -https://github.com/mjosaarinen/tiny_sha3 -commit dcbb3192047c2a721f5f851db591871d428036a9 - -- All functions have been converted to static functions. -- sha3() function is commented out. 
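The _io and _testcapi hunks above replace several statically allocated PyTypeObject definitions with heap types created from PyType_Spec. A minimal sketch of that pattern (the "Example" type and "_example" module name are hypothetical, for illustration only):

    #include <Python.h>

    typedef struct {
        PyObject_HEAD
        PyObject *payload;
    } ExampleObject;

    static int
    Example_traverse(ExampleObject *self, visitproc visit, void *arg)
    {
        Py_VISIT(Py_TYPE(self));    /* heap types visit their own type */
        Py_VISIT(self->payload);
        return 0;
    }

    static int
    Example_clear(ExampleObject *self)
    {
        Py_CLEAR(self->payload);
        return 0;
    }

    static void
    Example_dealloc(ExampleObject *self)
    {
        PyTypeObject *tp = Py_TYPE(self);
        PyObject_GC_UnTrack(self);
        (void)Example_clear(self);
        tp->tp_free(self);
        Py_DECREF(tp);              /* release the instance's reference to its heap type */
    }

    static PyType_Slot Example_slots[] = {
        {Py_tp_dealloc, Example_dealloc},
        {Py_tp_traverse, Example_traverse},
        {Py_tp_clear, Example_clear},
        {0, NULL},
    };

    static PyType_Spec Example_spec = {
        .name = "_example.Example",
        .basicsize = sizeof(ExampleObject),
        .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
                  Py_TPFLAGS_IMMUTABLETYPE),
        .slots = Example_slots,
    };

    /* In the module's exec function:
     *   PyObject *tp = PyType_FromModuleAndSpec(module, &Example_spec, NULL);
     *   if (tp == NULL || PyModule_AddType(module, (PyTypeObject *)tp) < 0) {
     *       // handle the error; Py_XDECREF(tp) as appropriate
     *   }
     */

Binding the type to the module this way is what lets methods recover per-module state through defining_class arguments (see get_io_state_by_cls above) instead of relying on C-level globals.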
diff --git a/Modules/_sha3/sha3.c b/Modules/_sha3/sha3.c deleted file mode 100644 index e2d3fd7b8ad855..00000000000000 --- a/Modules/_sha3/sha3.c +++ /dev/null @@ -1,193 +0,0 @@ -// sha3.c -// 19-Nov-11 Markku-Juhani O. Saarinen - -// Revised 07-Aug-15 to match with official release of FIPS PUB 202 "SHA3" -// Revised 03-Sep-15 for portability + OpenSSL - style API - -#include "sha3.h" - -// update the state with given number of rounds - -static void sha3_keccakf(uint64_t st[25]) -{ - // constants - const uint64_t keccakf_rndc[24] = { - 0x0000000000000001, 0x0000000000008082, 0x800000000000808a, - 0x8000000080008000, 0x000000000000808b, 0x0000000080000001, - 0x8000000080008081, 0x8000000000008009, 0x000000000000008a, - 0x0000000000000088, 0x0000000080008009, 0x000000008000000a, - 0x000000008000808b, 0x800000000000008b, 0x8000000000008089, - 0x8000000000008003, 0x8000000000008002, 0x8000000000000080, - 0x000000000000800a, 0x800000008000000a, 0x8000000080008081, - 0x8000000000008080, 0x0000000080000001, 0x8000000080008008 - }; - const int keccakf_rotc[24] = { - 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, - 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44 - }; - const int keccakf_piln[24] = { - 10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, - 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1 - }; - - // variables - int i, j, r; - uint64_t t, bc[5]; - -#if __BYTE_ORDER__ != __ORDER_LITTLE_ENDIAN__ - uint8_t *v; - - // endianess conversion. this is redundant on little-endian targets - for (i = 0; i < 25; i++) { - v = (uint8_t *) &st[i]; - st[i] = ((uint64_t) v[0]) | (((uint64_t) v[1]) << 8) | - (((uint64_t) v[2]) << 16) | (((uint64_t) v[3]) << 24) | - (((uint64_t) v[4]) << 32) | (((uint64_t) v[5]) << 40) | - (((uint64_t) v[6]) << 48) | (((uint64_t) v[7]) << 56); - } -#endif - - // actual iteration - for (r = 0; r < KECCAKF_ROUNDS; r++) { - - // Theta - for (i = 0; i < 5; i++) - bc[i] = st[i] ^ st[i + 5] ^ st[i + 10] ^ st[i + 15] ^ st[i + 20]; - - for (i = 0; i < 5; i++) { - t = bc[(i + 4) % 5] ^ ROTL64(bc[(i + 1) % 5], 1); - for (j = 0; j < 25; j += 5) - st[j + i] ^= t; - } - - // Rho Pi - t = st[1]; - for (i = 0; i < 24; i++) { - j = keccakf_piln[i]; - bc[0] = st[j]; - st[j] = ROTL64(t, keccakf_rotc[i]); - t = bc[0]; - } - - // Chi - for (j = 0; j < 25; j += 5) { - for (i = 0; i < 5; i++) - bc[i] = st[j + i]; - for (i = 0; i < 5; i++) - st[j + i] ^= (~bc[(i + 1) % 5]) & bc[(i + 2) % 5]; - } - - // Iota - st[0] ^= keccakf_rndc[r]; - } - -#if __BYTE_ORDER__ != __ORDER_LITTLE_ENDIAN__ - // endianess conversion. 
this is redundant on little-endian targets - for (i = 0; i < 25; i++) { - v = (uint8_t *) &st[i]; - t = st[i]; - v[0] = t & 0xFF; - v[1] = (t >> 8) & 0xFF; - v[2] = (t >> 16) & 0xFF; - v[3] = (t >> 24) & 0xFF; - v[4] = (t >> 32) & 0xFF; - v[5] = (t >> 40) & 0xFF; - v[6] = (t >> 48) & 0xFF; - v[7] = (t >> 56) & 0xFF; - } -#endif -} - -// Initialize the context for SHA3 - -static int sha3_init(sha3_ctx_t *c, int mdlen) -{ - int i; - - for (i = 0; i < 25; i++) - c->st.q[i] = 0; - c->mdlen = mdlen; - c->rsiz = 200 - 2 * mdlen; - c->pt = 0; - - return 1; -} - -// update state with more data - -static int sha3_update(sha3_ctx_t *c, const void *data, size_t len) -{ - size_t i; - int j; - - j = c->pt; - for (i = 0; i < len; i++) { - c->st.b[j++] ^= ((const uint8_t *) data)[i]; - if (j >= c->rsiz) { - sha3_keccakf(c->st.q); - j = 0; - } - } - c->pt = j; - - return 1; -} - -// finalize and output a hash - -static int sha3_final(void *md, sha3_ctx_t *c) -{ - int i; - - c->st.b[c->pt] ^= 0x06; - c->st.b[c->rsiz - 1] ^= 0x80; - sha3_keccakf(c->st.q); - - for (i = 0; i < c->mdlen; i++) { - ((uint8_t *) md)[i] = c->st.b[i]; - } - - return 1; -} - -#if 0 -// compute a SHA-3 hash (md) of given byte length from "in" - -void *sha3(const void *in, size_t inlen, void *md, int mdlen) -{ - sha3_ctx_t sha3; - - sha3_init(&sha3, mdlen); - sha3_update(&sha3, in, inlen); - sha3_final(md, &sha3); - - return md; -} -#endif - -// SHAKE128 and SHAKE256 extensible-output functionality - -static void shake_xof(sha3_ctx_t *c) -{ - c->st.b[c->pt] ^= 0x1F; - c->st.b[c->rsiz - 1] ^= 0x80; - sha3_keccakf(c->st.q); - c->pt = 0; -} - -static void shake_out(sha3_ctx_t *c, void *out, size_t len) -{ - size_t i; - int j; - - j = c->pt; - for (i = 0; i < len; i++) { - if (j >= c->rsiz) { - sha3_keccakf(c->st.q); - j = 0; - } - ((uint8_t *) out)[i] = c->st.b[j++]; - } - c->pt = j; -} - diff --git a/Modules/_sha3/sha3.h b/Modules/_sha3/sha3.h deleted file mode 100644 index f973d6733ec2cc..00000000000000 --- a/Modules/_sha3/sha3.h +++ /dev/null @@ -1,49 +0,0 @@ -// sha3.h -// 19-Nov-11 Markku-Juhani O. Saarinen - -#ifndef SHA3_H -#define SHA3_H - -#include -#include - -#ifndef KECCAKF_ROUNDS -#define KECCAKF_ROUNDS 24 -#endif - -#ifndef ROTL64 -#define ROTL64(x, y) (((x) << (y)) | ((x) >> (64 - (y)))) -#endif - -// state context -typedef struct { - union { // state: - uint8_t b[200]; // 8-bit bytes - uint64_t q[25]; // 64-bit words - } st; - int pt, rsiz, mdlen; // these don't overflow -} sha3_ctx_t; - -// Compression function. 
-static void sha3_keccakf(uint64_t st[25]); - -// OpenSSL - like interfece -static int sha3_init(sha3_ctx_t *c, int mdlen); // mdlen = hash output in bytes -static int sha3_update(sha3_ctx_t *c, const void *data, size_t len); -static int sha3_final(void *md, sha3_ctx_t *c); // digest goes to md - -// compute a sha3 hash (md) of given byte length from "in" -#if 0 -static void *sha3(const void *in, size_t inlen, void *md, int mdlen); -#endif - -// SHAKE128 and SHAKE256 extensible-output functions -#define shake128_init(c) sha3_init(c, 16) -#define shake256_init(c) sha3_init(c, 32) -#define shake_update sha3_update - -static void shake_xof(sha3_ctx_t *c); -static void shake_out(sha3_ctx_t *c, void *out, size_t len); - -#endif - diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h index 182754cca36d61..417abcc4626170 100644 --- a/Modules/_sqlite/clinic/connection.c.h +++ b/Modules/_sqlite/clinic/connection.c.h @@ -228,7 +228,7 @@ PyDoc_STRVAR(blobopen__doc__, static PyObject * blobopen_impl(pysqlite_Connection *self, const char *table, const char *col, - int row, int readonly, const char *name); + sqlite3_int64 row, int readonly, const char *name); static PyObject * blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -263,7 +263,7 @@ blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyO Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 3; const char *table; const char *col; - int row; + sqlite3_int64 row; int readonly = 0; const char *name = "main"; @@ -297,8 +297,7 @@ blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyO PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - row = _PyLong_AsInt(args[2]); - if (row == -1 && PyErr_Occurred()) { + if (!sqlite3_int64_converter(args[2], &row)) { goto exit; } if (!noptargs) { @@ -1666,4 +1665,4 @@ getconfig(pysqlite_Connection *self, PyObject *arg) #ifndef DESERIALIZE_METHODDEF #define DESERIALIZE_METHODDEF #endif /* !defined(DESERIALIZE_METHODDEF) */ -/*[clinic end generated code: output=8b03149c115ee6da input=a9049054013a1b77]*/ +/*[clinic end generated code: output=834a99827555bf1a input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index aec3aa8bbf4ed8..7bbb462ed54dfa 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -118,6 +118,20 @@ autocommit_converter(PyObject *val, enum autocommit_mode *result) return 0; } +static int +sqlite3_int64_converter(PyObject *obj, sqlite3_int64 *result) +{ + if (!PyLong_Check(obj)) { + PyErr_SetString(PyExc_TypeError, "expected 'int'"); + return 0; + } + *result = _pysqlite_long_as_int64(obj); + if (PyErr_Occurred()) { + return 0; + } + return 1; +} + #define clinic_state() (pysqlite_get_state_by_type(Py_TYPE(self))) #include "clinic/connection.c.h" #undef clinic_state @@ -188,8 +202,12 @@ class Autocommit_converter(CConverter): type = "enum autocommit_mode" converter = "autocommit_converter" +class sqlite3_int64_converter(CConverter): + type = "sqlite3_int64" + converter = "sqlite3_int64_converter" + [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=bc2aa6c7ba0c5f8f]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=dff8760fb1eba6a1]*/ // NB: This needs to be in sync with the sqlite3.connect docstring /*[clinic input] @@ -483,7 +501,7 @@ _sqlite3.Connection.blobopen as blobopen Table name. 
column as col: str Column name. - row: int + row: sqlite3_int64 Row index. / * @@ -497,8 +515,8 @@ Open and return a BLOB object. static PyObject * blobopen_impl(pysqlite_Connection *self, const char *table, const char *col, - int row, int readonly, const char *name) -/*[clinic end generated code: output=0c8e2e58516d0b5c input=fa73c83aa7a7ddee]*/ + sqlite3_int64 row, int readonly, const char *name) +/*[clinic end generated code: output=6a02d43efb885d1c input=23576bd1108d8774]*/ { if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { return NULL; diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c index 9c42faa232c70d..27bd42f4595e1c 100644 --- a/Modules/_sqlite/module.c +++ b/Modules/_sqlite/module.c @@ -785,6 +785,7 @@ module_exec(PyObject *module) static struct PyModuleDef_Slot module_slots[] = { {Py_mod_exec, module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index 4b6290a5967932..f8a1a05a318889 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -3221,6 +3221,7 @@ sre_exec(PyObject *m) static PyModuleDef_Slot sre_slots[] = { {Py_mod_exec, sre_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/_ssl.c b/Modules/_ssl.c index c9e2f24d66cc00..016a5a5cbca548 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -6161,6 +6161,9 @@ static PyModuleDef_Slot sslmodule_slots[] = { {Py_mod_exec, sslmodule_init_constants}, {Py_mod_exec, sslmodule_init_versioninfo}, {Py_mod_exec, sslmodule_init_strings}, + // XXX gh-103092: fix isolation. + {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, + //{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_stat.c b/Modules/_stat.c index 546e6a5f94ca15..4218799103b59d 100644 --- a/Modules/_stat.c +++ b/Modules/_stat.c @@ -612,6 +612,7 @@ stat_exec(PyObject *module) static PyModuleDef_Slot stat_slots[] = { {Py_mod_exec, stat_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_statisticsmodule.c b/Modules/_statisticsmodule.c index b9d1e4f1616036..1d5465fbe6d04e 100644 --- a/Modules/_statisticsmodule.c +++ b/Modules/_statisticsmodule.c @@ -129,6 +129,7 @@ PyDoc_STRVAR(statistics_doc, "Accelerators for the statistics module.\n"); static struct PyModuleDef_Slot _statisticsmodule_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_struct.c b/Modules/_struct.c index 3db7b991acd0a1..4f9478bd98095d 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -1832,11 +1832,6 @@ unpackiter_iternext(unpackiterobject *self) return result; } -PyObject *unpackiter_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { - PyErr_Format(PyExc_TypeError, "Cannot create '%.200s objects", _PyType_Name(type)); - return NULL; -} - static PyType_Slot unpackiter_type_slots[] = { {Py_tp_dealloc, unpackiter_dealloc}, {Py_tp_getattro, PyObject_GenericGetAttr}, @@ -1844,7 +1839,6 @@ static PyType_Slot unpackiter_type_slots[] = { {Py_tp_iter, PyObject_SelfIter}, {Py_tp_iternext, unpackiter_iternext}, {Py_tp_methods, unpackiter_methods}, - {Py_tp_new, unpackiter_new}, {0, 0}, }; @@ -1853,7 +1847,7 @@ static PyType_Spec unpackiter_type_spec = { sizeof(unpackiterobject), 0, (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | - Py_TPFLAGS_IMMUTABLETYPE), + Py_TPFLAGS_IMMUTABLETYPE | 
Py_TPFLAGS_DISALLOW_INSTANTIATION), unpackiter_type_slots }; @@ -2572,6 +2566,7 @@ _structmodule_exec(PyObject *m) static PyModuleDef_Slot _structmodule_slots[] = { {Py_mod_exec, _structmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_testcapi/buffer.c b/Modules/_testcapi/buffer.c new file mode 100644 index 00000000000000..aff9a477eff57e --- /dev/null +++ b/Modules/_testcapi/buffer.c @@ -0,0 +1,102 @@ +/* Test PEP 688 - Buffers */ + +#include "parts.h" + +#include "structmember.h" // PyMemberDef +#include // offsetof + +typedef struct { + PyObject_HEAD + PyObject *obj; + Py_ssize_t references; +} testBufObject; + +static PyObject * +testbuf_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + PyObject *obj = PyBytes_FromString("test"); + if (obj == NULL) { + return NULL; + } + testBufObject *self = (testBufObject *)type->tp_alloc(type, 0); + if (self == NULL) { + Py_DECREF(obj); + return NULL; + } + self->obj = obj; + self->references = 0; + return (PyObject *)self; +} + +static int +testbuf_traverse(testBufObject *self, visitproc visit, void *arg) +{ + Py_VISIT(self->obj); + return 0; +} + +static int +testbuf_clear(testBufObject *self) +{ + Py_CLEAR(self->obj); + return 0; +} + +static void +testbuf_dealloc(testBufObject *self) +{ + PyObject_GC_UnTrack(self); + Py_XDECREF(self->obj); + Py_TYPE(self)->tp_free((PyObject *) self); +} + +static int +testbuf_getbuf(testBufObject *self, Py_buffer *view, int flags) +{ + int buf = PyObject_GetBuffer(self->obj, view, flags); + Py_SETREF(view->obj, Py_NewRef(self)); + self->references++; + return buf; +} + +static void +testbuf_releasebuf(testBufObject *self, Py_buffer *view) +{ + self->references--; + assert(self->references >= 0); +} + +static PyBufferProcs testbuf_as_buffer = { + .bf_getbuffer = (getbufferproc) testbuf_getbuf, + .bf_releasebuffer = (releasebufferproc) testbuf_releasebuf, +}; + +static struct PyMemberDef testbuf_members[] = { + {"references", T_PYSSIZET, offsetof(testBufObject, references), READONLY}, + {NULL}, +}; + +static PyTypeObject testBufType = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "testBufType", + .tp_basicsize = sizeof(testBufObject), + .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, + .tp_new = testbuf_new, + .tp_dealloc = (destructor) testbuf_dealloc, + .tp_traverse = (traverseproc) testbuf_traverse, + .tp_clear = (inquiry) testbuf_clear, + .tp_as_buffer = &testbuf_as_buffer, + .tp_members = testbuf_members +}; + +int +_PyTestCapi_Init_Buffer(PyObject *m) { + if (PyType_Ready(&testBufType) < 0) { + return -1; + } + if (PyModule_AddObjectRef(m, "testBuf", (PyObject *)&testBufType)) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/heaptype.c b/Modules/_testcapi/heaptype.c index 209cc182c0698d..3488e35922c5ac 100644 --- a/Modules/_testcapi/heaptype.c +++ b/Modules/_testcapi/heaptype.c @@ -22,7 +22,7 @@ static PyObject *pytype_fromspec_meta(PyObject* self, PyObject *meta) "_testcapi.HeapCTypeViaMetaclass", sizeof(PyObject), 0, - Py_TPFLAGS_DEFAULT, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, HeapCTypeViaMetaclass_slots }; @@ -371,7 +371,6 @@ create_type_from_repeated_slots(PyObject *self, PyObject *variant_obj) } - static PyObject * make_immutable_type_with_base(PyObject *self, PyObject *base) { @@ -385,6 +384,30 @@ make_immutable_type_with_base(PyObject *self, PyObject *base) return PyType_FromSpecWithBases(&ImmutableSubclass_spec, base); } +static PyObject * +make_type_with_base(PyObject *self, PyObject 
*base) +{ + assert(PyType_Check(base)); + PyType_Spec ImmutableSubclass_spec = { + .name = "_testcapi.Subclass", + .basicsize = (int)((PyTypeObject*)base)->tp_basicsize, + .slots = empty_type_slots, + .flags = Py_TPFLAGS_DEFAULT, + }; + return PyType_FromSpecWithBases(&ImmutableSubclass_spec, base); +} + + +static PyObject * +pyobject_getitemdata(PyObject *self, PyObject *o) +{ + void *pointer = PyObject_GetItemData(o); + if (pointer == NULL) { + return NULL; + } + return PyLong_FromVoidPtr(pointer); +} + static PyMethodDef TestMethods[] = { {"pytype_fromspec_meta", pytype_fromspec_meta, METH_O}, @@ -397,6 +420,8 @@ static PyMethodDef TestMethods[] = { test_from_spec_invalid_metatype_inheritance, METH_NOARGS}, {"make_immutable_type_with_base", make_immutable_type_with_base, METH_O}, + {"make_type_with_base", make_type_with_base, METH_O}, + {"pyobject_getitemdata", pyobject_getitemdata, METH_O}, {NULL}, }; @@ -973,6 +998,113 @@ static PyType_Spec HeapCTypeSetattr_spec = { HeapCTypeSetattr_slots }; +PyDoc_STRVAR(HeapCCollection_doc, +"Tuple-like heap type that uses PyObject_GetItemData for items."); + +static PyObject* +HeapCCollection_new(PyTypeObject *subtype, PyObject *args, PyObject *kwds) +{ + PyObject *self = NULL; + PyObject *result = NULL; + + Py_ssize_t size = PyTuple_GET_SIZE(args); + self = subtype->tp_alloc(subtype, size); + if (!self) { + goto finally; + } + PyObject **data = PyObject_GetItemData(self); + if (!data) { + goto finally; + } + + for (Py_ssize_t i = 0; i < size; i++) { + data[i] = Py_NewRef(PyTuple_GET_ITEM(args, i)); + } + + result = self; + self = NULL; + finally: + Py_XDECREF(self); + return result; +} + +static Py_ssize_t +HeapCCollection_length(PyVarObject *self) +{ + return Py_SIZE(self); +} + +static PyObject* +HeapCCollection_item(PyObject *self, Py_ssize_t i) +{ + if (i < 0 || i >= Py_SIZE(self)) { + return PyErr_Format(PyExc_IndexError, "index %zd out of range", i); + } + PyObject **data = PyObject_GetItemData(self); + if (!data) { + return NULL; + } + return Py_NewRef(data[i]); +} + +static int +HeapCCollection_traverse(PyObject *self, visitproc visit, void *arg) +{ + PyObject **data = PyObject_GetItemData(self); + if (!data) { + return -1; + } + for (Py_ssize_t i = 0; i < Py_SIZE(self); i++) { + Py_VISIT(data[i]); + } + return 0; +} + +static int +HeapCCollection_clear(PyObject *self) +{ + PyObject **data = PyObject_GetItemData(self); + if (!data) { + return -1; + } + Py_ssize_t size = Py_SIZE(self); + Py_SET_SIZE(self, 0); + for (Py_ssize_t i = 0; i < size; i++) { + Py_CLEAR(data[i]); + } + return 0; +} + +static void +HeapCCollection_dealloc(PyObject *self) +{ + PyTypeObject *tp = Py_TYPE(self); + HeapCCollection_clear(self); + PyObject_GC_UnTrack(self); + tp->tp_free(self); + Py_DECREF(tp); +} + +static PyType_Slot HeapCCollection_slots[] = { + {Py_tp_new, HeapCCollection_new}, + {Py_sq_length, HeapCCollection_length}, + {Py_sq_item, HeapCCollection_item}, + {Py_tp_traverse, HeapCCollection_traverse}, + {Py_tp_clear, HeapCCollection_clear}, + {Py_tp_dealloc, HeapCCollection_dealloc}, + {Py_tp_doc, (void *)HeapCCollection_doc}, + {0, 0}, +}; + +static PyType_Spec HeapCCollection_spec = { + .name = "_testcapi.HeapCCollection", + .basicsize = sizeof(PyVarObject), + .itemsize = sizeof(PyObject*), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_ITEMS_AT_END), + .slots = HeapCCollection_slots, +}; + int _PyTestCapi_Init_Heaptype(PyObject *m) { _testcapimodule = PyModule_GetDef(m); @@ -1096,5 +1228,16 @@ 
_PyTestCapi_Init_Heaptype(PyObject *m) { } PyModule_AddObject(m, "HeapCTypeMetaclassCustomNew", HeapCTypeMetaclassCustomNew); + PyObject *HeapCCollection = PyType_FromMetaclass( + NULL, m, &HeapCCollection_spec, NULL); + if (HeapCCollection == NULL) { + return -1; + } + int rc = PyModule_AddType(m, (PyTypeObject *)HeapCCollection); + Py_DECREF(HeapCCollection); + if (rc < 0) { + return -1; + } + return 0; } diff --git a/Modules/_testcapi/heaptype_relative.c b/Modules/_testcapi/heaptype_relative.c new file mode 100644 index 00000000000000..c247ca33b33708 --- /dev/null +++ b/Modules/_testcapi/heaptype_relative.c @@ -0,0 +1,343 @@ +#define Py_LIMITED_API 0x030c0000 // 3.12 +#include "parts.h" +#include <stddef.h> // max_align_t +#include <string.h> // memset + +#ifdef LIMITED_API_AVAILABLE + +static PyType_Slot empty_slots[] = { + {0, NULL}, +}; + +static PyObject * +make_sized_heaptypes(PyObject *module, PyObject *args) +{ + PyObject *base = NULL; + PyObject *sub = NULL; + PyObject *instance = NULL; + PyObject *result = NULL; + + int extra_base_size, basicsize; + + int r = PyArg_ParseTuple(args, "ii", &extra_base_size, &basicsize); + if (!r) { + goto finally; + } + + PyType_Spec base_spec = { + .name = "_testcapi.Base", + .basicsize = sizeof(PyObject) + extra_base_size, + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + .slots = empty_slots, + }; + PyType_Spec sub_spec = { + .name = "_testcapi.Sub", + .basicsize = basicsize, + .flags = Py_TPFLAGS_DEFAULT, + .slots = empty_slots, + }; + + base = PyType_FromMetaclass(NULL, module, &base_spec, NULL); + if (!base) { + goto finally; + } + sub = PyType_FromMetaclass(NULL, module, &sub_spec, base); + if (!sub) { + goto finally; + } + instance = PyObject_CallNoArgs(sub); + if (!instance) { + goto finally; + } + char *data_ptr = PyObject_GetTypeData(instance, (PyTypeObject *)sub); + if (!data_ptr) { + goto finally; + } + Py_ssize_t data_size = PyType_GetTypeDataSize((PyTypeObject *)sub); + if (data_size < 0) { + goto finally; + } + + result = Py_BuildValue("OOOKnn", base, sub, instance, + (unsigned long long)data_ptr, + (Py_ssize_t)(data_ptr - (char*)instance), + data_size); + finally: + Py_XDECREF(base); + Py_XDECREF(sub); + Py_XDECREF(instance); + return result; +} + +static PyObject * +var_heaptype_set_data_to_3s( + PyObject *self, PyTypeObject *defining_class, + PyObject **args, Py_ssize_t nargs, PyObject *kwnames) +{ + void *data_ptr = PyObject_GetTypeData(self, defining_class); + if (!data_ptr) { + return NULL; + } + Py_ssize_t data_size = PyType_GetTypeDataSize(defining_class); + if (data_size < 0) { + return NULL; + } + memset(data_ptr, 3, data_size); + Py_RETURN_NONE; +} + +static PyObject * +var_heaptype_get_data(PyObject *self, PyTypeObject *defining_class, + PyObject **args, Py_ssize_t nargs, PyObject *kwnames) +{ + void *data_ptr = PyObject_GetTypeData(self, defining_class); + if (!data_ptr) { + return NULL; + } + Py_ssize_t data_size = PyType_GetTypeDataSize(defining_class); + if (data_size < 0) { + return NULL; + } + return PyBytes_FromStringAndSize(data_ptr, data_size); +} + +static PyMethodDef var_heaptype_methods[] = { + {"set_data_to_3s", _PyCFunction_CAST(var_heaptype_set_data_to_3s), + METH_METHOD | METH_FASTCALL | METH_KEYWORDS}, + {"get_data", _PyCFunction_CAST(var_heaptype_get_data), + METH_METHOD | METH_FASTCALL | METH_KEYWORDS}, + {NULL}, +}; + +static PyObject * +subclass_var_heaptype(PyObject *module, PyObject *args) +{ + PyObject *result = NULL; + + PyObject *base; // borrowed from args + int basicsize, itemsize; + long pfunc; + + int r = 
PyArg_ParseTuple(args, "Oiil", &base, &basicsize, &itemsize, &pfunc); + if (!r) { + goto finally; + } + + PyType_Slot slots[] = { + {Py_tp_methods, var_heaptype_methods}, + {0, NULL}, + }; + + PyType_Spec sub_spec = { + .name = "_testcapi.Sub", + .basicsize = basicsize, + .itemsize = itemsize, + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_ITEMS_AT_END, + .slots = slots, + }; + + result = PyType_FromMetaclass(NULL, module, &sub_spec, base); + finally: + return result; +} + +static PyObject * +subclass_heaptype(PyObject *module, PyObject *args) +{ + PyObject *result = NULL; + + PyObject *base; // borrowed from args + int basicsize, itemsize; + + int r = PyArg_ParseTuple(args, "Oii", &base, &basicsize, &itemsize); + if (!r) { + goto finally; + } + + PyType_Slot slots[] = { + {Py_tp_methods, var_heaptype_methods}, + {0, NULL}, + }; + + PyType_Spec sub_spec = { + .name = "_testcapi.Sub", + .basicsize = basicsize, + .itemsize = itemsize, + .flags = Py_TPFLAGS_DEFAULT, + .slots = slots, + }; + + result = PyType_FromMetaclass(NULL, module, &sub_spec, base); + finally: + return result; +} + +static PyMemberDef * +heaptype_with_member_extract_and_check_memb(PyObject *self) +{ + PyMemberDef *def = PyType_GetSlot(Py_TYPE(self), Py_tp_members); + if (!def) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ValueError, "tp_members is NULL"); + } + return NULL; + } + if (!def[0].name) { + PyErr_SetString(PyExc_ValueError, "tp_members[0] is NULL"); + return NULL; + } + if (def[1].name) { + PyErr_SetString(PyExc_ValueError, "tp_members[1] is not NULL"); + return NULL; + } + if (strcmp(def[0].name, "memb")) { + PyErr_SetString(PyExc_ValueError, "tp_members[0] is not for `memb`"); + return NULL; + } + if (def[0].flags) { + PyErr_SetString(PyExc_ValueError, "tp_members[0] has flags set"); + return NULL; + } + return def; +} + +static PyObject * +heaptype_with_member_get_memb(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyMemberDef *def = heaptype_with_member_extract_and_check_memb(self); + return PyMember_GetOne((const char *)self, def); +} + +static PyObject * +heaptype_with_member_set_memb(PyObject *self, PyObject *value) +{ + PyMemberDef *def = heaptype_with_member_extract_and_check_memb(self); + int r = PyMember_SetOne((char *)self, def, value); + if (r < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +get_memb_offset(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyMemberDef *def = heaptype_with_member_extract_and_check_memb(self); + return PyLong_FromSsize_t(def->offset); +} + +static PyObject * +heaptype_with_member_get_memb_relative(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyMemberDef def = {"memb", Py_T_BYTE, sizeof(PyObject), Py_RELATIVE_OFFSET}; + return PyMember_GetOne((const char *)self, &def); +} + +static PyObject * +heaptype_with_member_set_memb_relative(PyObject *self, PyObject *value) +{ + PyMemberDef def = {"memb", Py_T_BYTE, sizeof(PyObject), Py_RELATIVE_OFFSET}; + int r = PyMember_SetOne((char *)self, &def, value); + if (r < 0) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyMethodDef heaptype_with_member_methods[] = { + {"get_memb", heaptype_with_member_get_memb, METH_NOARGS}, + {"set_memb", heaptype_with_member_set_memb, METH_O}, + {"get_memb_offset", get_memb_offset, METH_NOARGS}, + {"get_memb_relative", heaptype_with_member_get_memb_relative, METH_NOARGS}, + {"set_memb_relative", heaptype_with_member_set_memb_relative, METH_O}, + {NULL}, +}; + +static PyObject * +make_heaptype_with_member(PyObject *module, PyObject *args) +{ + PyObject *base 
= NULL; + PyObject *result = NULL; + + int extra_base_size, basicsize, offset, add_flag; + + int r = PyArg_ParseTuple(args, "iiip", &extra_base_size, &basicsize, &offset, &add_flag); + if (!r) { + goto finally; + } + + PyType_Spec base_spec = { + .name = "_testcapi.Base", + .basicsize = sizeof(PyObject) + extra_base_size, + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + .slots = empty_slots, + }; + base = PyType_FromMetaclass(NULL, module, &base_spec, NULL); + if (!base) { + goto finally; + } + + PyMemberDef members[] = { + {"memb", Py_T_BYTE, offset, add_flag ? Py_RELATIVE_OFFSET : 0}, + {0}, + }; + PyType_Slot slots[] = { + {Py_tp_members, members}, + {Py_tp_methods, heaptype_with_member_methods}, + {0, NULL}, + }; + + PyType_Spec sub_spec = { + .name = "_testcapi.Sub", + .basicsize = basicsize, + .flags = Py_TPFLAGS_DEFAULT, + .slots = slots, + }; + + result = PyType_FromMetaclass(NULL, module, &sub_spec, base); + finally: + Py_XDECREF(base); + return result; +} + + +static PyObject * +test_alignof_max_align_t(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + // We define ALIGNOF_MAX_ALIGN_T even if the compiler doesn't support + // max_align_t. Double-check that it's correct. + assert(ALIGNOF_MAX_ALIGN_T > 0); + assert(ALIGNOF_MAX_ALIGN_T >= _Alignof(long long)); + assert(ALIGNOF_MAX_ALIGN_T >= _Alignof(long double)); + assert(ALIGNOF_MAX_ALIGN_T >= _Alignof(void*)); + assert(ALIGNOF_MAX_ALIGN_T >= _Alignof(void (*)(void))); + + // Ensure it's a power of two + assert((ALIGNOF_MAX_ALIGN_T & (ALIGNOF_MAX_ALIGN_T - 1)) == 0); + + Py_RETURN_NONE; +} + +static PyMethodDef TestMethods[] = { + {"make_sized_heaptypes", make_sized_heaptypes, METH_VARARGS}, + {"subclass_var_heaptype", subclass_var_heaptype, METH_VARARGS}, + {"subclass_heaptype", subclass_heaptype, METH_VARARGS}, + {"make_heaptype_with_member", make_heaptype_with_member, METH_VARARGS}, + {"test_alignof_max_align_t", test_alignof_max_align_t, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_HeaptypeRelative(PyObject *m) { + if (PyModule_AddFunctions(m, TestMethods) < 0) { + return -1; + } + + if (PyModule_AddIntMacro(m, ALIGNOF_MAX_ALIGN_T) < 0) { + return -1; + } + + return 0; +} + +#endif // LIMITED_API_AVAILABLE diff --git a/Modules/_testcapi/immortal.c b/Modules/_testcapi/immortal.c new file mode 100644 index 00000000000000..9f81389811c645 --- /dev/null +++ b/Modules/_testcapi/immortal.c @@ -0,0 +1,47 @@ +#include "parts.h" + +int verify_immortality(PyObject *object) +{ + assert(_Py_IsImmortal(object)); + Py_ssize_t old_count = Py_REFCNT(object); + for (int j = 0; j < 10000; j++) { + Py_DECREF(object); + } + Py_ssize_t current_count = Py_REFCNT(object); + return old_count == current_count; +} + +static PyObject * +test_immortal_builtins(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *objects[] = {Py_True, Py_False, Py_None, Py_Ellipsis}; + Py_ssize_t n = Py_ARRAY_LENGTH(objects); + for (Py_ssize_t i = 0; i < n; i++) { + assert(verify_immortality(objects[i])); + } + Py_RETURN_NONE; +} + +static PyObject * +test_immortal_small_ints(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + for (int i = -5; i <= 256; i++) { + assert(verify_immortality(PyLong_FromLong(i))); + } + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_immortal_builtins", test_immortal_builtins, METH_NOARGS}, + {"test_immortal_small_ints", test_immortal_small_ints, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_Immortal(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + 
return 0; +} diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index 60ec81dad2ba9e..663d4f2255de88 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -38,10 +38,13 @@ int _PyTestCapi_Init_Float(PyObject *module); int _PyTestCapi_Init_Structmember(PyObject *module); int _PyTestCapi_Init_Exceptions(PyObject *module); int _PyTestCapi_Init_Code(PyObject *module); +int _PyTestCapi_Init_Buffer(PyObject *module); int _PyTestCapi_Init_PyOS(PyObject *module); +int _PyTestCapi_Init_Immortal(PyObject *module); #ifdef LIMITED_API_AVAILABLE int _PyTestCapi_Init_VectorcallLimited(PyObject *module); +int _PyTestCapi_Init_HeaptypeRelative(PyObject *module); #endif // LIMITED_API_AVAILABLE #endif diff --git a/Modules/_testcapi/unicode.c b/Modules/_testcapi/unicode.c index 2d23993ce420b3..7dd3b9c0c03e50 100644 --- a/Modules/_testcapi/unicode.c +++ b/Modules/_testcapi/unicode.c @@ -102,6 +102,278 @@ test_widechar(PyObject *self, PyObject *Py_UNUSED(ignored)) #define NULLABLE(x) do { if (x == Py_None) x = NULL; } while (0); +static PyObject * +unicode_copy(PyObject *unicode) +{ + PyObject *copy; + + if (!unicode) { + return NULL; + } + if (!PyUnicode_Check(unicode)) { + Py_INCREF(unicode); + return unicode; + } + + copy = PyUnicode_New(PyUnicode_GET_LENGTH(unicode), + PyUnicode_MAX_CHAR_VALUE(unicode)); + if (!copy) { + return NULL; + } + if (PyUnicode_CopyCharacters(copy, 0, unicode, + 0, PyUnicode_GET_LENGTH(unicode)) < 0) + { + Py_DECREF(copy); + return NULL; + } + return copy; +} + +/* Test PyUnicode_New() */ +static PyObject * +unicode_new(PyObject *self, PyObject *args) +{ + Py_ssize_t size; + unsigned int maxchar; + PyObject *result; + + if (!PyArg_ParseTuple(args, "nI", &size, &maxchar)) { + return NULL; + } + + result = PyUnicode_New(size, (Py_UCS4)maxchar); + if (!result) { + return NULL; + } + if (size > 0 && maxchar <= 0x10ffff && + PyUnicode_Fill(result, 0, size, (Py_UCS4)maxchar) < 0) + { + Py_DECREF(result); + return NULL; + } + return result; +} + +/* Test PyUnicode_Fill() */ +static PyObject * +unicode_fill(PyObject *self, PyObject *args) +{ + PyObject *to, *to_copy; + Py_ssize_t start, length, filled; + unsigned int fill_char; + + if (!PyArg_ParseTuple(args, "OnnI", &to, &start, &length, &fill_char)) { + return NULL; + } + + NULLABLE(to); + if (!(to_copy = unicode_copy(to)) && to) { + return NULL; + } + + filled = PyUnicode_Fill(to_copy, start, length, (Py_UCS4)fill_char); + if (filled == -1 && PyErr_Occurred()) { + Py_DECREF(to_copy); + return NULL; + } + return Py_BuildValue("(Nn)", to_copy, filled); +} + +/* Test PyUnicode_WriteChar() */ +static PyObject * +unicode_writechar(PyObject *self, PyObject *args) +{ + PyObject *to, *to_copy; + Py_ssize_t index; + unsigned int character; + int result; + + if (!PyArg_ParseTuple(args, "OnI", &to, &index, &character)) { + return NULL; + } + + NULLABLE(to); + if (!(to_copy = unicode_copy(to)) && to) { + return NULL; + } + + result = PyUnicode_WriteChar(to_copy, index, (Py_UCS4)character); + if (result == -1 && PyErr_Occurred()) { + Py_DECREF(to_copy); + return NULL; + } + return Py_BuildValue("(Ni)", to_copy, result); +} + +/* Test PyUnicode_Resize() */ +static PyObject * +unicode_resize(PyObject *self, PyObject *args) +{ + PyObject *obj, *copy; + Py_ssize_t length; + int result; + + if (!PyArg_ParseTuple(args, "On", &obj, &length)) { + return NULL; + } + + NULLABLE(obj); + if (!(copy = unicode_copy(obj)) && obj) { + return NULL; + } + result = PyUnicode_Resize(&copy, length); + if (result == -1 && 
PyErr_Occurred()) { + Py_XDECREF(copy); + return NULL; + } + if (obj && PyUnicode_Check(obj) && length > PyUnicode_GET_LENGTH(obj)) { + if (PyUnicode_Fill(copy, PyUnicode_GET_LENGTH(obj), length, 0U) < 0) { + Py_DECREF(copy); + return NULL; + } + } + return Py_BuildValue("(Ni)", copy, result); +} + +/* Test PyUnicode_Append() */ +static PyObject * +unicode_append(PyObject *self, PyObject *args) +{ + PyObject *left, *right, *left_copy; + + if (!PyArg_ParseTuple(args, "OO", &left, &right)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + if (!(left_copy = unicode_copy(left)) && left) { + return NULL; + } + PyUnicode_Append(&left_copy, right); + return left_copy; +} + +/* Test PyUnicode_AppendAndDel() */ +static PyObject * +unicode_appendanddel(PyObject *self, PyObject *args) +{ + PyObject *left, *right, *left_copy; + + if (!PyArg_ParseTuple(args, "OO", &left, &right)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + if (!(left_copy = unicode_copy(left)) && left) { + return NULL; + } + Py_XINCREF(right); + PyUnicode_AppendAndDel(&left_copy, right); + return left_copy; +} + +/* Test PyUnicode_FromStringAndSize() */ +static PyObject * +unicode_fromstringandsize(PyObject *self, PyObject *args) +{ + const char *s; + Py_ssize_t bsize; + Py_ssize_t size = -100; + + if (!PyArg_ParseTuple(args, "z#|n", &s, &bsize, &size)) { + return NULL; + } + + if (size == -100) { + size = bsize; + } + return PyUnicode_FromStringAndSize(s, size); +} + +/* Test PyUnicode_FromString() */ +static PyObject * +unicode_fromstring(PyObject *self, PyObject *arg) +{ + const char *s; + Py_ssize_t size; + + if (!PyArg_Parse(arg, "z#", &s, &size)) { + return NULL; + } + return PyUnicode_FromString(s); +} + +/* Test PyUnicode_FromKindAndData() */ +static PyObject * +unicode_fromkindanddata(PyObject *self, PyObject *args) +{ + int kind; + void *buffer; + Py_ssize_t bsize; + Py_ssize_t size = -100; + + if (!PyArg_ParseTuple(args, "iz#|n", &kind, &buffer, &bsize, &size)) { + return NULL; + } + + if (size == -100) { + size = bsize; + } + if (kind && size % kind) { + PyErr_SetString(PyExc_AssertionError, + "invalid size in unicode_fromkindanddata()"); + return NULL; + } + return PyUnicode_FromKindAndData(kind, buffer, kind ? 
size / kind : 0); +} + +/* Test PyUnicode_Substring() */ +static PyObject * +unicode_substring(PyObject *self, PyObject *args) +{ + PyObject *str; + Py_ssize_t start, end; + + if (!PyArg_ParseTuple(args, "Onn", &str, &start, &end)) { + return NULL; + } + + NULLABLE(str); + return PyUnicode_Substring(str, start, end); +} + +/* Test PyUnicode_GetLength() */ +static PyObject * +unicode_getlength(PyObject *self, PyObject *arg) +{ + Py_ssize_t result; + + NULLABLE(arg); + result = PyUnicode_GetLength(arg); + if (result == -1) + return NULL; + return PyLong_FromSsize_t(result); +} + +/* Test PyUnicode_ReadChar() */ +static PyObject * +unicode_readchar(PyObject *self, PyObject *args) +{ + PyObject *unicode; + Py_ssize_t index; + Py_UCS4 result; + + if (!PyArg_ParseTuple(args, "On", &unicode, &index)) { + return NULL; + } + + NULLABLE(unicode); + result = PyUnicode_ReadChar(unicode, index); + if (result == (Py_UCS4)-1) + return NULL; + return PyLong_FromUnsignedLong(result); +} + /* Test PyUnicode_FromObject() */ static PyObject * unicode_fromobject(PyObject *self, PyObject *arg) @@ -110,6 +382,51 @@ unicode_fromobject(PyObject *self, PyObject *arg) return PyUnicode_FromObject(arg); } +/* Test PyUnicode_InternInPlace() */ +static PyObject * +unicode_interninplace(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + Py_XINCREF(arg); + PyUnicode_InternInPlace(&arg); + return arg; +} + +/* Test PyUnicode_InternFromString() */ +static PyObject * +unicode_internfromstring(PyObject *self, PyObject *arg) +{ + const char *s; + Py_ssize_t size; + + if (!PyArg_Parse(arg, "z#", &s, &size)) { + return NULL; + } + return PyUnicode_InternFromString(s); +} + +/* Test PyUnicode_FromWideChar() */ +static PyObject * +unicode_fromwidechar(PyObject *self, PyObject *args) +{ + const char *s; + Py_ssize_t bsize; + Py_ssize_t size = -100; + + if (!PyArg_ParseTuple(args, "z#|n", &s, &bsize, &size)) { + return NULL; + } + if (size == -100) { + if (bsize % SIZEOF_WCHAR_T) { + PyErr_SetString(PyExc_AssertionError, + "invalid size in unicode_fromwidechar()"); + return NULL; + } + size = bsize / SIZEOF_WCHAR_T; + } + return PyUnicode_FromWideChar((const wchar_t *)s, size); +} + /* Test PyUnicode_AsWideChar() */ static PyObject * unicode_aswidechar(PyObject *self, PyObject *args) @@ -118,8 +435,9 @@ unicode_aswidechar(PyObject *self, PyObject *args) Py_ssize_t buflen, size; wchar_t *buffer; - if (!PyArg_ParseTuple(args, "Un", &unicode, &buflen)) + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) return NULL; + NULLABLE(unicode); buffer = PyMem_New(wchar_t, buflen); if (buffer == NULL) return PyErr_NoMemory(); @@ -142,17 +460,35 @@ unicode_aswidechar(PyObject *self, PyObject *args) return Py_BuildValue("(Nn)", result, size); } +/* Test PyUnicode_AsWideCharString() with NULL as buffer */ +static PyObject * +unicode_aswidechar_null(PyObject *self, PyObject *args) +{ + PyObject *unicode; + Py_ssize_t buflen, size; + + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) + return NULL; + NULLABLE(unicode); + size = PyUnicode_AsWideChar(unicode, NULL, buflen); + if (size == -1) { + return NULL; + } + return PyLong_FromSsize_t(size); +} + /* Test PyUnicode_AsWideCharString() */ static PyObject * unicode_aswidecharstring(PyObject *self, PyObject *args) { PyObject *unicode, *result; - Py_ssize_t size; + Py_ssize_t size = 100; wchar_t *buffer; - if (!PyArg_ParseTuple(args, "U", &unicode)) + if (!PyArg_ParseTuple(args, "O", &unicode)) return NULL; + NULLABLE(unicode); buffer = PyUnicode_AsWideCharString(unicode, &size); if (buffer 
== NULL) return NULL; @@ -164,6 +500,28 @@ unicode_aswidecharstring(PyObject *self, PyObject *args) return Py_BuildValue("(Nn)", result, size); } +/* Test PyUnicode_AsWideCharString() with NULL as the size address */ +static PyObject * +unicode_aswidecharstring_null(PyObject *self, PyObject *args) +{ + PyObject *unicode, *result; + wchar_t *buffer; + + if (!PyArg_ParseTuple(args, "O", &unicode)) + return NULL; + + NULLABLE(unicode); + buffer = PyUnicode_AsWideCharString(unicode, NULL); + if (buffer == NULL) + return NULL; + + result = PyUnicode_FromWideChar(buffer, -1); + PyMem_Free(buffer); + if (result == NULL) + return NULL; + return result; +} + /* Test PyUnicode_AsUCS4() */ static PyObject * unicode_asucs4(PyObject *self, PyObject *args) @@ -173,10 +531,11 @@ unicode_asucs4(PyObject *self, PyObject *args) int copy_null; Py_ssize_t str_len, buf_len; - if (!PyArg_ParseTuple(args, "Unp:unicode_asucs4", &unicode, &str_len, &copy_null)) { + if (!PyArg_ParseTuple(args, "Onp:unicode_asucs4", &unicode, &str_len, &copy_null)) { return NULL; } + NULLABLE(unicode); buf_len = str_len + 1; buffer = PyMem_NEW(Py_UCS4, buf_len); if (buffer == NULL) { @@ -195,48 +554,117 @@ unicode_asucs4(PyObject *self, PyObject *args) return result; } -/* Test PyUnicode_AsUTF8() */ +/* Test PyUnicode_AsUCS4Copy() */ static PyObject * -unicode_asutf8(PyObject *self, PyObject *args) +unicode_asucs4copy(PyObject *self, PyObject *args) { PyObject *unicode; - const char *buffer; + Py_UCS4 *buffer; + PyObject *result; - if (!PyArg_ParseTuple(args, "U", &unicode)) { + if (!PyArg_ParseTuple(args, "O", &unicode)) { return NULL; } - buffer = PyUnicode_AsUTF8(unicode); + NULLABLE(unicode); + buffer = PyUnicode_AsUCS4Copy(unicode); if (buffer == NULL) { return NULL; } + result = PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, + buffer, + PyUnicode_GET_LENGTH(unicode) + 1); + PyMem_FREE(buffer); + return result; +} + +/* Test PyUnicode_FromOrdinal() */ +static PyObject * +unicode_fromordinal(PyObject *self, PyObject *args) +{ + int ordinal; + + if (!PyArg_ParseTuple(args, "i", &ordinal)) + return NULL; - return PyBytes_FromString(buffer); + return PyUnicode_FromOrdinal(ordinal); +} + +/* Test PyUnicode_AsUTF8() */ +static PyObject * +unicode_asutf8(PyObject *self, PyObject *args) +{ + PyObject *unicode; + Py_ssize_t buflen; + const char *s; + + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) + return NULL; + + NULLABLE(unicode); + s = PyUnicode_AsUTF8(unicode); + if (s == NULL) + return NULL; + + return PyBytes_FromStringAndSize(s, buflen); } /* Test PyUnicode_AsUTF8AndSize() */ static PyObject * unicode_asutf8andsize(PyObject *self, PyObject *args) { - PyObject *unicode, *result; - const char *buffer; - Py_ssize_t utf8_len; + PyObject *unicode; + Py_ssize_t buflen; + const char *s; + Py_ssize_t size = -100; - if(!PyArg_ParseTuple(args, "U", &unicode)) { + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) return NULL; - } - buffer = PyUnicode_AsUTF8AndSize(unicode, &utf8_len); - if (buffer == NULL) { + NULLABLE(unicode); + s = PyUnicode_AsUTF8AndSize(unicode, &size); + if (s == NULL) + return NULL; - } - result = PyBytes_FromString(buffer); - if (result == NULL) { + return Py_BuildValue("(y#n)", s, buflen, size); +} + +/* Test PyUnicode_AsUTF8AndSize() with NULL as the size address */ +static PyObject * +unicode_asutf8andsize_null(PyObject *self, PyObject *args) +{ + PyObject *unicode; + Py_ssize_t buflen; + const char *s; + + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) return NULL; - } - return Py_BuildValue("(Nn)", 
result, utf8_len); + NULLABLE(unicode); + s = PyUnicode_AsUTF8AndSize(unicode, NULL); + if (s == NULL) + return NULL; + + return PyBytes_FromStringAndSize(s, buflen); +} + +/* Test PyUnicode_GetDefaultEncoding() */ +static PyObject * +unicode_getdefaultencoding(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + const char *s = PyUnicode_GetDefaultEncoding(); + if (s == NULL) + return NULL; + + return PyBytes_FromString(s); +} + +/* Test _PyUnicode_TransformDecimalAndSpaceToASCII() */ +static PyObject * +unicode_transformdecimalandspacetoascii(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return _PyUnicode_TransformDecimalAndSpaceToASCII(arg); } /* Test PyUnicode_DecodeUTF8() */ @@ -470,11 +898,11 @@ unicode_findchar(PyObject *self, PyObject *args) Py_ssize_t result; Py_ssize_t start, end; - if (!PyArg_ParseTuple(args, "UInni:unicode_findchar", &str, &ch, + if (!PyArg_ParseTuple(args, "OInni:unicode_findchar", &str, &ch, &start, &end, &direction)) { return NULL; } - + NULLABLE(str); result = PyUnicode_FindChar(str, (Py_UCS4)ch, start, end, direction); if (result == -2) return NULL; @@ -612,11 +1040,12 @@ unicode_copycharacters(PyObject *self, PyObject *args) PyObject *from, *to, *to_copy; Py_ssize_t from_start, to_start, how_many, copied; - if (!PyArg_ParseTuple(args, "UnOnn:unicode_copycharacters", &to, &to_start, + if (!PyArg_ParseTuple(args, "UnOnn", &to, &to_start, &from, &from_start, &how_many)) { return NULL; } + NULLABLE(from); if (!(to_copy = PyUnicode_New(PyUnicode_GET_LENGTH(to), PyUnicode_MAX_CHAR_VALUE(to)))) { return NULL; @@ -626,8 +1055,9 @@ unicode_copycharacters(PyObject *self, PyObject *args) return NULL; } - if ((copied = PyUnicode_CopyCharacters(to_copy, to_start, from, - from_start, how_many)) < 0) { + copied = PyUnicode_CopyCharacters(to_copy, to_start, from, + from_start, how_many); + if (copied == -1 && PyErr_Occurred()) { Py_DECREF(to_copy); return NULL; } @@ -1053,14 +1483,36 @@ static PyMethodDef TestMethods[] = { test_unicode_compare_with_ascii, METH_NOARGS}, {"test_string_from_format", test_string_from_format, METH_NOARGS}, {"test_widechar", test_widechar, METH_NOARGS}, + {"unicode_new", unicode_new, METH_VARARGS}, + {"unicode_fill", unicode_fill, METH_VARARGS}, + {"unicode_writechar", unicode_writechar, METH_VARARGS}, + {"unicode_resize", unicode_resize, METH_VARARGS}, + {"unicode_append", unicode_append, METH_VARARGS}, + {"unicode_appendanddel", unicode_appendanddel, METH_VARARGS}, + {"unicode_fromstringandsize",unicode_fromstringandsize, METH_VARARGS}, + {"unicode_fromstring", unicode_fromstring, METH_O}, + {"unicode_fromkindanddata", unicode_fromkindanddata, METH_VARARGS}, + {"unicode_substring", unicode_substring, METH_VARARGS}, + {"unicode_getlength", unicode_getlength, METH_O}, + {"unicode_readchar", unicode_readchar, METH_VARARGS}, {"unicode_fromobject", unicode_fromobject, METH_O}, + {"unicode_interninplace", unicode_interninplace, METH_O}, + {"unicode_internfromstring", unicode_internfromstring, METH_O}, + {"unicode_fromwidechar", unicode_fromwidechar, METH_VARARGS}, {"unicode_aswidechar", unicode_aswidechar, METH_VARARGS}, + {"unicode_aswidechar_null", unicode_aswidechar_null, METH_VARARGS}, {"unicode_aswidecharstring", unicode_aswidecharstring, METH_VARARGS}, + {"unicode_aswidecharstring_null",unicode_aswidecharstring_null,METH_VARARGS}, {"unicode_asucs4", unicode_asucs4, METH_VARARGS}, + {"unicode_asucs4copy", unicode_asucs4copy, METH_VARARGS}, + {"unicode_fromordinal", unicode_fromordinal, METH_VARARGS}, {"unicode_asutf8", unicode_asutf8, 
METH_VARARGS}, {"unicode_asutf8andsize", unicode_asutf8andsize, METH_VARARGS}, + {"unicode_asutf8andsize_null",unicode_asutf8andsize_null, METH_VARARGS}, {"unicode_decodeutf8", unicode_decodeutf8, METH_VARARGS}, {"unicode_decodeutf8stateful",unicode_decodeutf8stateful, METH_VARARGS}, + {"unicode_getdefaultencoding",unicode_getdefaultencoding, METH_NOARGS}, + {"unicode_transformdecimalandspacetoascii", unicode_transformdecimalandspacetoascii, METH_O}, {"unicode_concat", unicode_concat, METH_VARARGS}, {"unicode_splitlines", unicode_splitlines, METH_VARARGS}, {"unicode_split", unicode_split, METH_VARARGS}, diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index c1892f6fa0a4b8..79ab7d3f5555c2 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -1488,6 +1488,7 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) int allow_threads = -1; int allow_daemon_threads = -1; int check_multi_interp_extensions = -1; + int own_gil = -1; int r; PyThreadState *substate, *mainstate; /* only initialise 'cflags.cf_flags' to test backwards compatibility */ @@ -1500,13 +1501,15 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) "allow_threads", "allow_daemon_threads", "check_multi_interp_extensions", + "own_gil", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwargs, - "s$pppppp:run_in_subinterp_with_config", kwlist, + "s$ppppppp:run_in_subinterp_with_config", kwlist, &code, &use_main_obmalloc, &allow_fork, &allow_exec, &allow_threads, &allow_daemon_threads, - &check_multi_interp_extensions)) { + &check_multi_interp_extensions, + &own_gil)) { return NULL; } if (use_main_obmalloc < 0) { @@ -1525,6 +1528,10 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) PyErr_SetString(PyExc_ValueError, "missing allow_threads"); return NULL; } + if (own_gil < 0) { + PyErr_SetString(PyExc_ValueError, "missing own_gil"); + return NULL; + } if (allow_daemon_threads < 0) { PyErr_SetString(PyExc_ValueError, "missing allow_daemon_threads"); return NULL; @@ -1538,15 +1545,16 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) PyThreadState_Swap(NULL); - const _PyInterpreterConfig config = { + const PyInterpreterConfig config = { .use_main_obmalloc = use_main_obmalloc, .allow_fork = allow_fork, .allow_exec = allow_exec, .allow_threads = allow_threads, .allow_daemon_threads = allow_daemon_threads, .check_multi_interp_extensions = check_multi_interp_extensions, + .own_gil = own_gil, }; - PyStatus status = _Py_NewInterpreterFromConfig(&substate, &config); + PyStatus status = Py_NewInterpreterFromConfig(&substate, &config); if (PyStatus_Exception(status)) { /* Since no new thread state was created, there is no exception to propagate; raise a fresh one after swapping in the old thread @@ -3363,7 +3371,7 @@ test_gc_visit_objects_basic(PyObject *Py_UNUSED(self), } state.target = obj; state.found = 0; - + PyUnstable_GC_VisitObjects(gc_visit_callback_basic, &state); Py_DECREF(obj); if (!state.found) { @@ -3400,6 +3408,98 @@ test_gc_visit_objects_exit_early(PyObject *Py_UNUSED(self), Py_RETURN_NONE; } +typedef struct { + PyObject_HEAD +} ObjExtraData; + +static PyObject * +obj_extra_data_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + size_t extra_size = sizeof(PyObject *); + PyObject *obj = PyUnstable_Object_GC_NewWithExtraData(type, extra_size); + if (obj == NULL) { + return PyErr_NoMemory(); + } + PyObject_GC_Track(obj); + return obj; +} + +static PyObject ** 
+obj_extra_data_get_extra_storage(PyObject *self) +{ + return (PyObject **)((char *)self + Py_TYPE(self)->tp_basicsize); +} + +static PyObject * +obj_extra_data_get(PyObject *self, void *Py_UNUSED(ignored)) +{ + PyObject **extra_storage = obj_extra_data_get_extra_storage(self); + PyObject *value = *extra_storage; + if (!value) { + Py_RETURN_NONE; + } + return Py_NewRef(value); +} + +static int +obj_extra_data_set(PyObject *self, PyObject *newval, void *Py_UNUSED(ignored)) +{ + PyObject **extra_storage = obj_extra_data_get_extra_storage(self); + Py_CLEAR(*extra_storage); + if (newval) { + *extra_storage = Py_NewRef(newval); + } + return 0; +} + +static PyGetSetDef obj_extra_data_getset[] = { + {"extra", (getter)obj_extra_data_get, (setter)obj_extra_data_set, NULL}, + {NULL} +}; + +static int +obj_extra_data_traverse(PyObject *self, visitproc visit, void *arg) +{ + PyObject **extra_storage = obj_extra_data_get_extra_storage(self); + PyObject *value = *extra_storage; + Py_VISIT(value); + return 0; +} + +static int +obj_extra_data_clear(PyObject *self) +{ + PyObject **extra_storage = obj_extra_data_get_extra_storage(self); + Py_CLEAR(*extra_storage); + return 0; +} + +static void +obj_extra_data_dealloc(PyObject *self) +{ + PyTypeObject *tp = Py_TYPE(self); + PyObject_GC_UnTrack(self); + obj_extra_data_clear(self); + tp->tp_free(self); + Py_DECREF(tp); +} + +static PyType_Slot ObjExtraData_Slots[] = { + {Py_tp_getset, obj_extra_data_getset}, + {Py_tp_dealloc, obj_extra_data_dealloc}, + {Py_tp_traverse, obj_extra_data_traverse}, + {Py_tp_clear, obj_extra_data_clear}, + {Py_tp_new, obj_extra_data_new}, + {Py_tp_free, PyObject_GC_Del}, + {0, NULL}, +}; + +static PyType_Spec ObjExtraData_TypeSpec = { + .name = "_testcapi.ObjExtraData", + .basicsize = sizeof(ObjExtraData), + .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, + .slots = ObjExtraData_Slots, +}; struct atexit_data { int called; @@ -3867,7 +3967,6 @@ static PyTypeObject MyList_Type = { MyList_new, /* tp_new */ }; - /* Test PEP 560 */ typedef struct { @@ -4124,6 +4223,17 @@ PyInit__testcapi(void) Py_INCREF(&MethStatic_Type); PyModule_AddObject(m, "MethStatic", (PyObject *)&MethStatic_Type); + PyObject *ObjExtraData_Type = PyType_FromModuleAndSpec( + m, &ObjExtraData_TypeSpec, NULL); + if (ObjExtraData_Type == 0) { + return NULL; + } + int ret = PyModule_AddType(m, (PyTypeObject*)ObjExtraData_Type); + Py_DECREF(ObjExtraData_Type); + if (ret < 0) { + return NULL; + } + PyModule_AddObject(m, "CHAR_MAX", PyLong_FromLong(CHAR_MAX)); PyModule_AddObject(m, "CHAR_MIN", PyLong_FromLong(CHAR_MIN)); PyModule_AddObject(m, "UCHAR_MAX", PyLong_FromLong(UCHAR_MAX)); @@ -4145,6 +4255,7 @@ PyInit__testcapi(void) PyModule_AddObject(m, "ULLONG_MAX", PyLong_FromUnsignedLongLong(ULLONG_MAX)); PyModule_AddObject(m, "PY_SSIZE_T_MAX", PyLong_FromSsize_t(PY_SSIZE_T_MAX)); PyModule_AddObject(m, "PY_SSIZE_T_MIN", PyLong_FromSsize_t(PY_SSIZE_T_MIN)); + PyModule_AddObject(m, "SIZEOF_WCHAR_T", PyLong_FromSsize_t(sizeof(wchar_t))); PyModule_AddObject(m, "SIZEOF_TIME_T", PyLong_FromSsize_t(sizeof(time_t))); PyModule_AddObject(m, "Py_Version", PyLong_FromUnsignedLong(Py_Version)); Py_INCREF(&PyInstanceMethod_Type); @@ -4207,9 +4318,15 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Code(m) < 0) { return NULL; } + if (_PyTestCapi_Init_Buffer(m) < 0) { + return NULL; + } if (_PyTestCapi_Init_PyOS(m) < 0) { return NULL; } + if (_PyTestCapi_Init_Immortal(m) < 0) { + return NULL; + } #ifndef LIMITED_API_AVAILABLE PyModule_AddObjectRef(m, 
"LIMITED_API_AVAILABLE", Py_False); @@ -4218,6 +4335,9 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_VectorcallLimited(m) < 0) { return NULL; } + if (_PyTestCapi_Init_HeaptypeRelative(m) < 0) { + return NULL; + } #endif PyState_AddModule(m, &_testcapimodule); diff --git a/Modules/_testclinic.c b/Modules/_testclinic.c index 91fdee24d328d9..6ff55a2755cf5a 100644 --- a/Modules/_testclinic.c +++ b/Modules/_testclinic.c @@ -9,6 +9,19 @@ #include "Python.h" + +// Used for clone_with_conv_f1 and clone_with_conv_v2 +typedef struct { + const char *name; +} custom_t; + +static int +custom_converter(PyObject *obj, custom_t *val) +{ + return 1; +} + + #include "clinic/_testclinic.c.h" @@ -1117,6 +1130,70 @@ gh_99240_double_free_impl(PyObject *module, char *a, char *b) } +/*[clinic input] +_testclinic.clone_f1 as clone_f1 + path: str +[clinic start generated code]*/ + +static PyObject * +clone_f1_impl(PyObject *module, const char *path) +/*[clinic end generated code: output=8c30b5620ba86715 input=9c614b7f025ebf70]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +_testclinic.clone_f2 as clone_f2 = _testclinic.clone_f1 +[clinic start generated code]*/ + +static PyObject * +clone_f2_impl(PyObject *module, const char *path) +/*[clinic end generated code: output=6aa1c39bec3f5d9b input=1aaaf47d6ed2324a]*/ +{ + Py_RETURN_NONE; +} + + +/*[python input] +class custom_t_converter(CConverter): + type = 'custom_t' + converter = 'custom_converter' + + def pre_render(self): + self.c_default = f'''{{ + .name = "{self.function.name}", + }}''' + +[python start generated code]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=b2fb801e99a06bf6]*/ + + +/*[clinic input] +_testclinic.clone_with_conv_f1 as clone_with_conv_f1 + path: custom_t = None +[clinic start generated code]*/ + +static PyObject * +clone_with_conv_f1_impl(PyObject *module, custom_t path) +/*[clinic end generated code: output=f7e030ffd5439cb0 input=bc77bc80dec3f46d]*/ +{ + return PyUnicode_FromString(path.name); +} + + +/*[clinic input] +_testclinic.clone_with_conv_f2 as clone_with_conv_f2 = _testclinic.clone_with_conv_f1 +[clinic start generated code]*/ + +static PyObject * +clone_with_conv_f2_impl(PyObject *module, custom_t path) +/*[clinic end generated code: output=9d7fdd6a75eecee4 input=cff459a205fa83bb]*/ +{ + return PyUnicode_FromString(path.name); +} + + static PyMethodDef tester_methods[] = { TEST_EMPTY_FUNCTION_METHODDEF OBJECTS_CONVERTER_METHODDEF @@ -1168,6 +1245,10 @@ static PyMethodDef tester_methods[] = { GH_32092_KW_PASS_METHODDEF GH_99233_REFCOUNT_METHODDEF GH_99240_DOUBLE_FREE_METHODDEF + CLONE_F1_METHODDEF + CLONE_F2_METHODDEF + CLONE_WITH_CONV_F1_METHODDEF + CLONE_WITH_CONV_F2_METHODDEF {NULL, NULL} }; diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 632fac2de0c419..ea9b6e72b3c924 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -14,7 +14,7 @@ #include "Python.h" #include "pycore_atomic_funcs.h" // _Py_atomic_int_get() #include "pycore_bitutils.h" // _Py_bswap32() -#include "pycore_compile.h" // _PyCompile_CodeGen, _PyCompile_OptimizeCfg +#include "pycore_compile.h" // _PyCompile_CodeGen, _PyCompile_OptimizeCfg, _PyCompile_Assemble #include "pycore_fileutils.h" // _Py_normpath #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_gc.h" // PyGC_Head @@ -593,17 +593,19 @@ _testinternalcapi.compiler_codegen -> object ast: object filename: object optimize: int + compile_mode: int = 0 Apply compiler code generation to an AST. 
[clinic start generated code]*/ static PyObject * _testinternalcapi_compiler_codegen_impl(PyObject *module, PyObject *ast, - PyObject *filename, int optimize) -/*[clinic end generated code: output=fbbbbfb34700c804 input=e9fbe6562f7f75e4]*/ + PyObject *filename, int optimize, + int compile_mode) +/*[clinic end generated code: output=40a68f6e13951cc8 input=a0e00784f1517cd7]*/ { PyCompilerFlags *flags = NULL; - return _PyCompile_CodeGen(ast, filename, flags, optimize); + return _PyCompile_CodeGen(ast, filename, flags, optimize, compile_mode); } @@ -625,6 +627,70 @@ _testinternalcapi_optimize_cfg_impl(PyObject *module, PyObject *instructions, return _PyCompile_OptimizeCfg(instructions, consts); } +static int +get_nonnegative_int_from_dict(PyObject *dict, const char *key) { + PyObject *obj = PyDict_GetItemString(dict, key); + if (obj == NULL) { + return -1; + } + return PyLong_AsLong(obj); +} + +/*[clinic input] + +_testinternalcapi.assemble_code_object -> object + + filename: object + instructions: object + metadata: object + +Create a code object for the given instructions. +[clinic start generated code]*/ + +static PyObject * +_testinternalcapi_assemble_code_object_impl(PyObject *module, + PyObject *filename, + PyObject *instructions, + PyObject *metadata) +/*[clinic end generated code: output=38003dc16a930f48 input=e713ad77f08fb3a8]*/ + +{ + assert(PyDict_Check(metadata)); + _PyCompile_CodeUnitMetadata umd; + + umd.u_name = PyDict_GetItemString(metadata, "name"); + umd.u_qualname = PyDict_GetItemString(metadata, "qualname"); + + assert(PyUnicode_Check(umd.u_name)); + assert(PyUnicode_Check(umd.u_qualname)); + + umd.u_consts = PyDict_GetItemString(metadata, "consts"); + umd.u_names = PyDict_GetItemString(metadata, "names"); + umd.u_varnames = PyDict_GetItemString(metadata, "varnames"); + umd.u_cellvars = PyDict_GetItemString(metadata, "cellvars"); + umd.u_freevars = PyDict_GetItemString(metadata, "freevars"); + umd.u_fasthidden = PyDict_GetItemString(metadata, "fasthidden"); + + assert(PyDict_Check(umd.u_consts)); + assert(PyDict_Check(umd.u_names)); + assert(PyDict_Check(umd.u_varnames)); + assert(PyDict_Check(umd.u_cellvars)); + assert(PyDict_Check(umd.u_freevars)); + assert(PyDict_Check(umd.u_fasthidden)); + + umd.u_argcount = get_nonnegative_int_from_dict(metadata, "argcount"); + umd.u_posonlyargcount = get_nonnegative_int_from_dict(metadata, "posonlyargcount"); + umd.u_kwonlyargcount = get_nonnegative_int_from_dict(metadata, "kwonlyargcount"); + umd.u_firstlineno = get_nonnegative_int_from_dict(metadata, "firstlineno"); + + assert(umd.u_argcount >= 0); + assert(umd.u_posonlyargcount >= 0); + assert(umd.u_kwonlyargcount >= 0); + assert(umd.u_firstlineno >= 0); + + return (PyObject*)_PyCompile_Assemble(&umd, filename, instructions); +} + static PyObject * get_interp_settings(PyObject *self, PyObject *args) @@ -667,6 +733,13 @@ get_interp_settings(PyObject *self, PyObject *args) return NULL; } + /* "own GIL" */ + PyObject *own_gil = interp->ceval.own_gil ? 
Py_True : Py_False; + if (PyDict_SetItemString(settings, "own_gil", own_gil) != 0) { + Py_DECREF(settings); + return NULL; + } + return settings; } @@ -705,6 +778,7 @@ static PyMethodDef module_functions[] = { {"set_eval_frame_record", set_eval_frame_record, METH_O, NULL}, _TESTINTERNALCAPI_COMPILER_CODEGEN_METHODDEF _TESTINTERNALCAPI_OPTIMIZE_CFG_METHODDEF + _TESTINTERNALCAPI_ASSEMBLE_CODE_OBJECT_METHODDEF {"get_interp_settings", get_interp_settings, METH_VARARGS, NULL}, {"clear_extension", clear_extension, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ @@ -726,6 +800,7 @@ module_exec(PyObject *module) static struct PyModuleDef_Slot module_slots[] = { {Py_mod_exec, module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index cf8990a2df0a9b..ca71b6156b005d 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -441,6 +441,7 @@ static int execfunc(PyObject *m) static PyModuleDef_Slot main_slots[] = { {Py_mod_exec, execfunc}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; @@ -680,6 +681,27 @@ PyInit__testmultiphase_export_unreported_exception(void) return PyModuleDef_Init(&main_def); } +static PyObject* +createfunc_noop(PyObject *spec, PyModuleDef *def) +{ + return PyModule_New("spam"); +} + +static PyModuleDef_Slot slots_multiple_create_slots[] = { + {Py_mod_create, createfunc_noop}, + {Py_mod_create, createfunc_noop}, + {0, NULL}, +}; + +static PyModuleDef def_multiple_create_slots = TEST_MODULE_DEF( + "_testmultiphase_multiple_create_slots", slots_multiple_create_slots, NULL); + +PyMODINIT_FUNC +PyInit__testmultiphase_multiple_create_slots(void) +{ + return PyModuleDef_Init(&def_multiple_create_slots); +} + static PyObject* createfunc_null(PyObject *spec, PyModuleDef *def) { @@ -745,6 +767,7 @@ PyInit__testmultiphase_create_unreported_exception(void) static PyModuleDef_Slot slots_nonmodule_with_exec_slots[] = { {Py_mod_create, createfunc_nonmodule}, {Py_mod_exec, execfunc}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; @@ -765,6 +788,7 @@ execfunc_err(PyObject *mod) static PyModuleDef_Slot slots_exec_err[] = { {Py_mod_exec, execfunc_err}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; @@ -786,6 +810,7 @@ execfunc_raise(PyObject *spec) static PyModuleDef_Slot slots_exec_raise[] = { {Py_mod_exec, execfunc_raise}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; @@ -807,6 +832,7 @@ execfunc_unreported_exception(PyObject *mod) static PyModuleDef_Slot slots_exec_unreported_exception[] = { {Py_mod_exec, execfunc_unreported_exception}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; @@ -845,6 +871,7 @@ meth_state_access_exec(PyObject *m) static PyModuleDef_Slot meth_state_access_slots[] = { {Py_mod_exec, meth_state_access_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; @@ -884,3 +911,59 @@ PyInit__test_module_state_shared(void) } return module; } + + +/* multiple interpreters support */ + +static PyModuleDef_Slot slots_multiple_multiple_interpreters_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {0, NULL}, +}; + +static PyModuleDef def_multiple_multiple_interpreters_slots = TEST_MODULE_DEF( + 
"_testmultiphase_multiple_multiple_interpreters_slots", + slots_multiple_multiple_interpreters_slots, + NULL); + +PyMODINIT_FUNC +PyInit__testmultiphase_multiple_multiple_interpreters_slots(void) +{ + return PyModuleDef_Init(&def_multiple_multiple_interpreters_slots); +} + +static PyModuleDef_Slot non_isolated_slots[] = { + {Py_mod_exec, execfunc}, + {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, + {0, NULL}, +}; + +static PyModuleDef non_isolated_def = TEST_MODULE_DEF("_test_non_isolated", + non_isolated_slots, + testexport_methods); + +PyMODINIT_FUNC +PyInit__test_non_isolated(void) +{ + return PyModuleDef_Init(&non_isolated_def); +} + + +static PyModuleDef_Slot shared_gil_only_slots[] = { + {Py_mod_exec, execfunc}, + /* Note that Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED is the default. + We put it here explicitly to draw attention to the contrast + with Py_MOD_PER_INTERPRETER_GIL_SUPPORTED. */ + {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED}, + {0, NULL}, +}; + +static PyModuleDef shared_gil_only_def = TEST_MODULE_DEF("_test_shared_gil_only", + shared_gil_only_slots, + testexport_methods); + +PyMODINIT_FUNC +PyInit__test_shared_gil_only(void) +{ + return PyModuleDef_Init(&shared_gil_only_def); +} diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index fd2fd9ab25f113..5d753b4a0ebc5e 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -1710,6 +1710,7 @@ The 'threading' module provides a more convenient interface."); static PyModuleDef_Slot thread_module_slots[] = { {Py_mod_exec, thread_module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index d69c5636486da9..c5714d5e7d5a0f 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -7,6 +7,7 @@ #include "pycore_runtime.h" // _Py_ID() #include "pycore_traceback.h" #include +#include "frameobject.h" // _PyInterpreterFrame_GetLine #include // malloc() @@ -257,7 +258,7 @@ static void tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) { frame->filename = &_Py_STR(anon_unknown); - int lineno = _PyInterpreterFrame_GetLine(pyframe); + int lineno = PyUnstable_InterpreterFrame_GetLine(pyframe); if (lineno < 0) { lineno = 0; } diff --git a/Modules/_typingmodule.c b/Modules/_typingmodule.c index 262dddb63fd5fe..64286375636aff 100644 --- a/Modules/_typingmodule.c +++ b/Modules/_typingmodule.c @@ -36,6 +36,7 @@ PyDoc_STRVAR(typing_doc, "Accelerators for the typing module.\n"); static struct PyModuleDef_Slot _typingmodule_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_uuidmodule.c b/Modules/_uuidmodule.c index eae38f5c98cc7f..ed3b2fedfd4d88 100644 --- a/Modules/_uuidmodule.c +++ b/Modules/_uuidmodule.c @@ -106,6 +106,7 @@ static PyMethodDef uuid_methods[] = { static PyModuleDef_Slot uuid_slots[] = { {Py_mod_exec, uuid_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_weakref.c b/Modules/_weakref.c index 157a852ae9a378..387b8fa9d0a6f1 100644 --- a/Modules/_weakref.c +++ b/Modules/_weakref.c @@ -174,6 +174,7 @@ weakref_exec(PyObject *module) static struct PyModuleDef_Slot weakref_slots[] = { {Py_mod_exec, weakref_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_winapi.c b/Modules/_winapi.c index fa380b8b798405..473bcb4736e925 100644 --- 
a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -2259,6 +2259,7 @@ static int winapi_exec(PyObject *m) static PyModuleDef_Slot winapi_slots[] = { {Py_mod_exec, winapi_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_xxinterpchannelsmodule.c b/Modules/_xxinterpchannelsmodule.c index 13b005eaef9866..616dd577688116 100644 --- a/Modules/_xxinterpchannelsmodule.c +++ b/Modules/_xxinterpchannelsmodule.c @@ -2418,6 +2418,7 @@ module_exec(PyObject *mod) static struct PyModuleDef_Slot module_slots[] = { {Py_mod_exec, module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 884fb0d31f2b7f..d7daae254638ec 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -513,12 +513,13 @@ interp_create(PyObject *self, PyObject *args, PyObject *kwds) // Create and initialize the new interpreter. PyThreadState *save_tstate = _PyThreadState_GET(); - const _PyInterpreterConfig config = isolated - ? (_PyInterpreterConfig)_PyInterpreterConfig_INIT - : (_PyInterpreterConfig)_PyInterpreterConfig_LEGACY_INIT; + assert(save_tstate != NULL); + const PyInterpreterConfig config = isolated + ? (PyInterpreterConfig)_PyInterpreterConfig_INIT + : (PyInterpreterConfig)_PyInterpreterConfig_LEGACY_INIT; // XXX Possible GILState issues? PyThreadState *tstate = NULL; - PyStatus status = _Py_NewInterpreterFromConfig(&tstate, &config); + PyStatus status = Py_NewInterpreterFromConfig(&tstate, &config); PyThreadState_Swap(save_tstate); if (PyStatus_Exception(status)) { /* Since no new thread state was created, there is no exception to @@ -821,6 +822,7 @@ module_exec(PyObject *mod) static struct PyModuleDef_Slot module_slots[] = { {Py_mod_exec, module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/_xxtestfuzz/fuzzer.c b/Modules/_xxtestfuzz/fuzzer.c index fb0c191d2c494d..37d402824853f0 100644 --- a/Modules/_xxtestfuzz/fuzzer.c +++ b/Modules/_xxtestfuzz/fuzzer.c @@ -526,13 +526,20 @@ int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) { #if !defined(_Py_FUZZ_ONE) || defined(_Py_FUZZ_fuzz_sre_compile) static int SRE_COMPILE_INITIALIZED = 0; if (!SRE_COMPILE_INITIALIZED && !init_sre_compile()) { - PyErr_Print(); - abort(); + if (!PyErr_ExceptionMatches(PyExc_DeprecationWarning)) { + PyErr_Print(); + abort(); + } + else { + PyErr_Clear(); + } } else { SRE_COMPILE_INITIALIZED = 1; } - rv |= _run_fuzz(data, size, fuzz_sre_compile); + if (SRE_COMPILE_INITIALIZED) { + rv |= _run_fuzz(data, size, fuzz_sre_compile); + } #endif #if !defined(_Py_FUZZ_ONE) || defined(_Py_FUZZ_fuzz_sre_match) static int SRE_MATCH_INITIALIZED = 0; diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index c215a75b804fdb..3b2d282d65cab9 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -2822,7 +2822,10 @@ zoneinfomodule_exec(PyObject *m) } static PyModuleDef_Slot zoneinfomodule_slots[] = { - {Py_mod_exec, zoneinfomodule_exec}, {0, NULL}}; + {Py_mod_exec, zoneinfomodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {0, NULL}, +}; static struct PyModuleDef zoneinfomodule = { .m_base = PyModuleDef_HEAD_INIT, diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 798a7629257966..f94bbec8e0bb3c 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -3111,6 +3111,7 @@ array_modexec(PyObject *m) static 
PyModuleDef_Slot arrayslots[] = { {Py_mod_exec, array_modexec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c index 47afd7f0751039..5882d405636400 100644 --- a/Modules/atexitmodule.c +++ b/Modules/atexitmodule.c @@ -314,12 +314,18 @@ upon normal program termination.\n\ Two public functions, register and unregister, are defined.\n\ "); +static PyModuleDef_Slot atexitmodule_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {0, NULL} +}; + static struct PyModuleDef atexitmodule = { PyModuleDef_HEAD_INIT, .m_name = "atexit", .m_doc = atexit__doc__, .m_size = 0, .m_methods = atexit_methods, + .m_slots = atexitmodule_slots, }; PyMODINIT_FUNC diff --git a/Modules/audioop.c b/Modules/audioop.c index 9325f82f9a17e0..604306d449265c 100644 --- a/Modules/audioop.c +++ b/Modules/audioop.c @@ -1975,6 +1975,7 @@ audioop_exec(PyObject* module) static PyModuleDef_Slot audioop_slots[] = { {Py_mod_exec, audioop_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/binascii.c b/Modules/binascii.c index 95ddb26988d6c9..4ecff4793be9a0 100644 --- a/Modules/binascii.c +++ b/Modules/binascii.c @@ -1291,6 +1291,7 @@ binascii_exec(PyObject *module) { static PyModuleDef_Slot binascii_slots[] = { {Py_mod_exec, binascii_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h index e553ff3e17b898..36bc7024df9acc 100644 --- a/Modules/cjkcodecs/cjkcodecs.h +++ b/Modules/cjkcodecs/cjkcodecs.h @@ -502,6 +502,7 @@ static struct PyMethodDef _cjk_methods[] = { static PyModuleDef_Slot _cjk_slots[] = { {Py_mod_exec, _cjk_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 233fc3020fd6a8..b501e4fb923232 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -2062,6 +2062,7 @@ static struct PyMethodDef _multibytecodec_methods[] = { static PyModuleDef_Slot _multibytecodec_slots[] = { {Py_mod_exec, _multibytecodec_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h index 43c5d771798634..6a780a80cd0bc4 100644 --- a/Modules/clinic/_asynciomodule.c.h +++ b/Modules/clinic/_asynciomodule.c.h @@ -482,14 +482,15 @@ _asyncio_Future__make_cancelled_error(FutureObj *self, PyObject *Py_UNUSED(ignor } PyDoc_STRVAR(_asyncio_Task___init____doc__, -"Task(coro, *, loop=None, name=None, context=None)\n" +"Task(coro, *, loop=None, name=None, context=None, eager_start=False)\n" "--\n" "\n" "A coroutine wrapped in a Future."); static int _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, - PyObject *name, PyObject *context); + PyObject *name, PyObject *context, + int eager_start); static int _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) @@ -497,14 +498,14 @@ _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) int return_value = -1; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - #define NUM_KEYWORDS 4 + #define NUM_KEYWORDS 5 static struct { PyGC_Head _this_is_not_used; PyObject_VAR_HEAD PyObject *ob_item[NUM_KEYWORDS]; } _kwtuple = { .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, 
NUM_KEYWORDS) - .ob_item = { &_Py_ID(coro), &_Py_ID(loop), &_Py_ID(name), &_Py_ID(context), }, + .ob_item = { &_Py_ID(coro), &_Py_ID(loop), &_Py_ID(name), &_Py_ID(context), &_Py_ID(eager_start), }, }; #undef NUM_KEYWORDS #define KWTUPLE (&_kwtuple.ob_base.ob_base) @@ -513,14 +514,14 @@ _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) # define KWTUPLE NULL #endif // !Py_BUILD_CORE - static const char * const _keywords[] = {"coro", "loop", "name", "context", NULL}; + static const char * const _keywords[] = {"coro", "loop", "name", "context", "eager_start", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "Task", .kwtuple = KWTUPLE, }; #undef KWTUPLE - PyObject *argsbuf[4]; + PyObject *argsbuf[5]; PyObject * const *fastargs; Py_ssize_t nargs = PyTuple_GET_SIZE(args); Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 1; @@ -528,6 +529,7 @@ _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) PyObject *loop = Py_None; PyObject *name = Py_None; PyObject *context = Py_None; + int eager_start = 0; fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 0, argsbuf); if (!fastargs) { @@ -549,9 +551,18 @@ _asyncio_Task___init__(PyObject *self, PyObject *args, PyObject *kwargs) goto skip_optional_kwonly; } } - context = fastargs[3]; + if (fastargs[3]) { + context = fastargs[3]; + if (!--noptargs) { + goto skip_optional_kwonly; + } + } + eager_start = PyObject_IsTrue(fastargs[4]); + if (eager_start < 0) { + goto exit; + } skip_optional_kwonly: - return_value = _asyncio_Task___init___impl((TaskObj *)self, coro, loop, name, context); + return_value = _asyncio_Task___init___impl((TaskObj *)self, coro, loop, name, context, eager_start); exit: return return_value; @@ -1064,6 +1075,63 @@ _asyncio__register_task(PyObject *module, PyObject *const *args, Py_ssize_t narg return return_value; } +PyDoc_STRVAR(_asyncio__register_eager_task__doc__, +"_register_eager_task($module, /, task)\n" +"--\n" +"\n" +"Register a new task in asyncio as executed by loop.\n" +"\n" +"Returns None."); + +#define _ASYNCIO__REGISTER_EAGER_TASK_METHODDEF \ + {"_register_eager_task", _PyCFunction_CAST(_asyncio__register_eager_task), METH_FASTCALL|METH_KEYWORDS, _asyncio__register_eager_task__doc__}, + +static PyObject * +_asyncio__register_eager_task_impl(PyObject *module, PyObject *task); + +static PyObject * +_asyncio__register_eager_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(task), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"task", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "_register_eager_task", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *task; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + task = args[0]; + return_value = _asyncio__register_eager_task_impl(module, task); + +exit: + return return_value; +} + 
PyDoc_STRVAR(_asyncio__unregister_task__doc__, "_unregister_task($module, /, task)\n" "--\n" @@ -1121,6 +1189,63 @@ _asyncio__unregister_task(PyObject *module, PyObject *const *args, Py_ssize_t na return return_value; } +PyDoc_STRVAR(_asyncio__unregister_eager_task__doc__, +"_unregister_eager_task($module, /, task)\n" +"--\n" +"\n" +"Unregister a task.\n" +"\n" +"Returns None."); + +#define _ASYNCIO__UNREGISTER_EAGER_TASK_METHODDEF \ + {"_unregister_eager_task", _PyCFunction_CAST(_asyncio__unregister_eager_task), METH_FASTCALL|METH_KEYWORDS, _asyncio__unregister_eager_task__doc__}, + +static PyObject * +_asyncio__unregister_eager_task_impl(PyObject *module, PyObject *task); + +static PyObject * +_asyncio__unregister_eager_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(task), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"task", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "_unregister_eager_task", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *task; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + task = args[0]; + return_value = _asyncio__unregister_eager_task_impl(module, task); + +exit: + return return_value; +} + PyDoc_STRVAR(_asyncio__enter_task__doc__, "_enter_task($module, /, loop, task)\n" "--\n" @@ -1243,6 +1368,66 @@ _asyncio__leave_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, return return_value; } +PyDoc_STRVAR(_asyncio__swap_current_task__doc__, +"_swap_current_task($module, /, loop, task)\n" +"--\n" +"\n" +"Temporarily swap in the supplied task and return the original one (or None).\n" +"\n" +"This is intended for use during eager coroutine execution."); + +#define _ASYNCIO__SWAP_CURRENT_TASK_METHODDEF \ + {"_swap_current_task", _PyCFunction_CAST(_asyncio__swap_current_task), METH_FASTCALL|METH_KEYWORDS, _asyncio__swap_current_task__doc__}, + +static PyObject * +_asyncio__swap_current_task_impl(PyObject *module, PyObject *loop, + PyObject *task); + +static PyObject * +_asyncio__swap_current_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(loop), &_Py_ID(task), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"loop", "task", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "_swap_current_task", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *loop; + PyObject *task; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 
2, 0, argsbuf); + if (!args) { + goto exit; + } + loop = args[0]; + task = args[1]; + return_value = _asyncio__swap_current_task_impl(module, loop, task); + +exit: + return return_value; +} + PyDoc_STRVAR(_asyncio_current_task__doc__, "current_task($module, /, loop=None)\n" "--\n" @@ -1302,4 +1487,4 @@ _asyncio_current_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs, exit: return return_value; } -/*[clinic end generated code: output=00f494214f2fd008 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6b0e283177b07639 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testclinic.c.h b/Modules/clinic/_testclinic.c.h index 831f58ca650aab..cc69f5c3d2fe9f 100644 --- a/Modules/clinic/_testclinic.c.h +++ b/Modules/clinic/_testclinic.c.h @@ -2817,4 +2817,262 @@ gh_99240_double_free(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=e8211606b03d733a input=a9049054013a1b77]*/ + +PyDoc_STRVAR(clone_f1__doc__, +"clone_f1($module, /, path)\n" +"--\n" +"\n"); + +#define CLONE_F1_METHODDEF \ + {"clone_f1", _PyCFunction_CAST(clone_f1), METH_FASTCALL|METH_KEYWORDS, clone_f1__doc__}, + +static PyObject * +clone_f1_impl(PyObject *module, const char *path); + +static PyObject * +clone_f1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(path), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"path", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "clone_f1", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + const char *path; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("clone_f1", "argument 'path'", "str", args[0]); + goto exit; + } + Py_ssize_t path_length; + path = PyUnicode_AsUTF8AndSize(args[0], &path_length); + if (path == NULL) { + goto exit; + } + if (strlen(path) != (size_t)path_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = clone_f1_impl(module, path); + +exit: + return return_value; +} + +PyDoc_STRVAR(clone_f2__doc__, +"clone_f2($module, /, path)\n" +"--\n" +"\n"); + +#define CLONE_F2_METHODDEF \ + {"clone_f2", _PyCFunction_CAST(clone_f2), METH_FASTCALL|METH_KEYWORDS, clone_f2__doc__}, + +static PyObject * +clone_f2_impl(PyObject *module, const char *path); + +static PyObject * +clone_f2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(path), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + 
+ static const char * const _keywords[] = {"path", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "clone_f2", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + const char *path; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("clone_f2", "argument 'path'", "str", args[0]); + goto exit; + } + Py_ssize_t path_length; + path = PyUnicode_AsUTF8AndSize(args[0], &path_length); + if (path == NULL) { + goto exit; + } + if (strlen(path) != (size_t)path_length) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + goto exit; + } + return_value = clone_f2_impl(module, path); + +exit: + return return_value; +} + +PyDoc_STRVAR(clone_with_conv_f1__doc__, +"clone_with_conv_f1($module, /, path=None)\n" +"--\n" +"\n"); + +#define CLONE_WITH_CONV_F1_METHODDEF \ + {"clone_with_conv_f1", _PyCFunction_CAST(clone_with_conv_f1), METH_FASTCALL|METH_KEYWORDS, clone_with_conv_f1__doc__}, + +static PyObject * +clone_with_conv_f1_impl(PyObject *module, custom_t path); + +static PyObject * +clone_with_conv_f1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(path), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"path", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "clone_with_conv_f1", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + custom_t path = { + .name = "clone_with_conv_f1", + }; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (!custom_converter(args[0], &path)) { + goto exit; + } +skip_optional_pos: + return_value = clone_with_conv_f1_impl(module, path); + +exit: + return return_value; +} + +PyDoc_STRVAR(clone_with_conv_f2__doc__, +"clone_with_conv_f2($module, /, path=None)\n" +"--\n" +"\n"); + +#define CLONE_WITH_CONV_F2_METHODDEF \ + {"clone_with_conv_f2", _PyCFunction_CAST(clone_with_conv_f2), METH_FASTCALL|METH_KEYWORDS, clone_with_conv_f2__doc__}, + +static PyObject * +clone_with_conv_f2_impl(PyObject *module, custom_t path); + +static PyObject * +clone_with_conv_f2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(path), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"path", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "clone_with_conv_f2", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + custom_t path = { + .name = "clone_with_conv_f2", + }; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (!custom_converter(args[0], &path)) { + goto exit; + } +skip_optional_pos: + return_value = clone_with_conv_f2_impl(module, path); + +exit: + return return_value; +} +/*[clinic end generated code: output=f58202a6e5df2d16 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testinternalcapi.c.h b/Modules/clinic/_testinternalcapi.c.h index e8d5681b194916..41dd50437956c4 100644 --- a/Modules/clinic/_testinternalcapi.c.h +++ b/Modules/clinic/_testinternalcapi.c.h @@ -9,7 +9,7 @@ preserve PyDoc_STRVAR(_testinternalcapi_compiler_codegen__doc__, -"compiler_codegen($module, /, ast, filename, optimize)\n" +"compiler_codegen($module, /, ast, filename, optimize, compile_mode=0)\n" "--\n" "\n" "Apply compiler code generation to an AST."); @@ -19,7 +19,8 @@ PyDoc_STRVAR(_testinternalcapi_compiler_codegen__doc__, static PyObject * _testinternalcapi_compiler_codegen_impl(PyObject *module, PyObject *ast, - PyObject *filename, int optimize); + PyObject *filename, int optimize, + int compile_mode); static PyObject * _testinternalcapi_compiler_codegen(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -27,14 +28,14 @@ _testinternalcapi_compiler_codegen(PyObject *module, PyObject *const *args, Py_s PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - #define NUM_KEYWORDS 3 + #define NUM_KEYWORDS 4 static struct { PyGC_Head _this_is_not_used; PyObject_VAR_HEAD PyObject *ob_item[NUM_KEYWORDS]; } _kwtuple = { .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(ast), &_Py_ID(filename), &_Py_ID(optimize), }, + 
.ob_item = { &_Py_ID(ast), &_Py_ID(filename), &_Py_ID(optimize), &_Py_ID(compile_mode), }, }; #undef NUM_KEYWORDS #define KWTUPLE (&_kwtuple.ob_base.ob_base) @@ -43,19 +44,21 @@ _testinternalcapi_compiler_codegen(PyObject *module, PyObject *const *args, Py_s # define KWTUPLE NULL #endif // !Py_BUILD_CORE - static const char * const _keywords[] = {"ast", "filename", "optimize", NULL}; + static const char * const _keywords[] = {"ast", "filename", "optimize", "compile_mode", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "compiler_codegen", .kwtuple = KWTUPLE, }; #undef KWTUPLE - PyObject *argsbuf[3]; + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 3; PyObject *ast; PyObject *filename; int optimize; + int compile_mode = 0; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 0, argsbuf); + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 4, 0, argsbuf); if (!args) { goto exit; } @@ -65,7 +68,15 @@ _testinternalcapi_compiler_codegen(PyObject *module, PyObject *const *args, Py_s if (optimize == -1 && PyErr_Occurred()) { goto exit; } - return_value = _testinternalcapi_compiler_codegen_impl(module, ast, filename, optimize); + if (!noptargs) { + goto skip_optional_pos; + } + compile_mode = _PyLong_AsInt(args[3]); + if (compile_mode == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional_pos: + return_value = _testinternalcapi_compiler_codegen_impl(module, ast, filename, optimize, compile_mode); exit: return return_value; @@ -128,4 +139,66 @@ _testinternalcapi_optimize_cfg(PyObject *module, PyObject *const *args, Py_ssize exit: return return_value; } -/*[clinic end generated code: output=efe95836482fd542 input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_testinternalcapi_assemble_code_object__doc__, +"assemble_code_object($module, /, filename, instructions, metadata)\n" +"--\n" +"\n" +"Create a code object for the given instructions."); + +#define _TESTINTERNALCAPI_ASSEMBLE_CODE_OBJECT_METHODDEF \ + {"assemble_code_object", _PyCFunction_CAST(_testinternalcapi_assemble_code_object), METH_FASTCALL|METH_KEYWORDS, _testinternalcapi_assemble_code_object__doc__}, + +static PyObject * +_testinternalcapi_assemble_code_object_impl(PyObject *module, + PyObject *filename, + PyObject *instructions, + PyObject *metadata); + +static PyObject * +_testinternalcapi_assemble_code_object(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(filename), &_Py_ID(instructions), &_Py_ID(metadata), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"filename", "instructions", "metadata", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "assemble_code_object", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *filename; + PyObject *instructions; + PyObject *metadata; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 0, argsbuf); + if (!args) { + goto exit; + } + filename = args[0]; + instructions = args[1]; + metadata = args[2]; 
+ return_value = _testinternalcapi_assemble_code_object_impl(module, filename, instructions, metadata); + +exit: + return return_value; +} +/*[clinic end generated code: output=ab661d56a14b1a1c input=a9049054013a1b77]*/ diff --git a/Modules/_sha3/clinic/sha3module.c.h b/Modules/clinic/sha3module.c.h similarity index 98% rename from Modules/_sha3/clinic/sha3module.c.h rename to Modules/clinic/sha3module.c.h index a0c7c1c043e515..299803a3420bf6 100644 --- a/Modules/_sha3/clinic/sha3module.c.h +++ b/Modules/clinic/sha3module.c.h @@ -12,7 +12,7 @@ PyDoc_STRVAR(py_sha3_new__doc__, "sha3_224(data=b\'\', /, *, usedforsecurity=True)\n" "--\n" "\n" -"Return a new BLAKE2b hash object."); +"Return a new SHA3 hash object."); static PyObject * py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity); @@ -193,4 +193,4 @@ _sha3_shake_128_hexdigest(SHA3object *self, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=747c3f34ddd14063 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=907cb475f3dc9ee0 input=a9049054013a1b77]*/ diff --git a/Modules/cmathmodule.c b/Modules/cmathmodule.c index b4f7e5424b4ccf..914a697f8e173b 100644 --- a/Modules/cmathmodule.c +++ b/Modules/cmathmodule.c @@ -1411,6 +1411,7 @@ cmath_exec(PyObject *mod) static PyModuleDef_Slot cmath_slots[] = { {Py_mod_exec, cmath_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/errnomodule.c b/Modules/errnomodule.c index df4e494ba8a973..fddde960a5fe9a 100644 --- a/Modules/errnomodule.c +++ b/Modules/errnomodule.c @@ -940,6 +940,7 @@ errno_exec(PyObject *module) static PyModuleDef_Slot errno_slots[] = { {Py_mod_exec, errno_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index 9b4e4199cdc20a..428b090193f093 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -1274,6 +1274,8 @@ PyExec_faulthandler(PyObject *module) { static PyModuleDef_Slot faulthandler_slots[] = { {Py_mod_exec, PyExec_faulthandler}, + // XXX gh-103092: fix isolation. + //{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c index 9a8ec8dc9858d7..6ca0b62bc5dca8 100644 --- a/Modules/fcntlmodule.c +++ b/Modules/fcntlmodule.c @@ -686,6 +686,7 @@ fcntl_exec(PyObject *module) static PyModuleDef_Slot fcntl_slots[] = { {Py_mod_exec, fcntl_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 3fd5f4cd70e832..26ddcdd538a4d4 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -2044,6 +2044,7 @@ gcmodule_exec(PyObject *module) static PyModuleDef_Slot gcmodule_slots[] = { {Py_mod_exec, gcmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; @@ -2174,23 +2175,6 @@ _PyGC_DumpShutdownStats(PyInterpreterState *interp) } -static void -gc_fini_untrack(PyGC_Head *list) -{ - PyGC_Head *gc; - for (gc = GC_NEXT(list); gc != list; gc = GC_NEXT(list)) { - PyObject *op = FROM_GC(gc); - _PyObject_GC_UNTRACK(op); - // gh-92036: If a deallocator function expect the object to be tracked - // by the GC (ex: func_dealloc()), it can crash if called on an object - // which is no longer tracked by the GC. Leak one strong reference on - // purpose so the object is never deleted and its deallocator is not - // called. 
- Py_INCREF(op); - } -} - - void _PyGC_Fini(PyInterpreterState *interp) { @@ -2198,17 +2182,9 @@ _PyGC_Fini(PyInterpreterState *interp) Py_CLEAR(gcstate->garbage); Py_CLEAR(gcstate->callbacks); - if (!_Py_IsMainInterpreter(interp)) { - // bpo-46070: Explicitly untrack all objects currently tracked by the - // GC. Otherwise, if an object is used later by another interpreter, - // calling PyObject_GC_UnTrack() on the object crashs if the previous - // or the next object of the PyGC_Head structure became a dangling - // pointer. - for (int i = 0; i < NUM_GENERATIONS; i++) { - PyGC_Head *gen = GEN_HEAD(gcstate, i); - gc_fini_untrack(gen); - } - } + /* We expect that none of this interpreters objects are shared + with other interpreters. + See https://github.com/python/cpython/issues/90228. */ } /* for debugging */ @@ -2367,6 +2343,19 @@ _PyObject_GC_NewVar(PyTypeObject *tp, Py_ssize_t nitems) return op; } +PyObject * +PyUnstable_Object_GC_NewWithExtraData(PyTypeObject *tp, size_t extra_size) +{ + size_t presize = _PyType_PreHeaderSize(tp); + PyObject *op = gc_alloc(_PyObject_SIZE(tp) + extra_size, presize); + if (op == NULL) { + return NULL; + } + memset(op, 0, _PyObject_SIZE(tp) + extra_size); + _PyObject_Init(op, tp); + return op; +} + PyVarObject * _PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems) { diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c index f6298ca0ee84c1..57cdde6064c24e 100644 --- a/Modules/grpmodule.c +++ b/Modules/grpmodule.c @@ -327,6 +327,7 @@ grpmodule_exec(PyObject *module) static PyModuleDef_Slot grpmodule_slots[] = { {Py_mod_exec, grpmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index c986e02867ca82..555eab09935e9e 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -4693,6 +4693,7 @@ itertoolsmodule_exec(PyObject *mod) static struct PyModuleDef_Slot itertoolsmodule_slots[] = { {Py_mod_exec, itertoolsmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index 4a2381d9611776..3737a9654575ab 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -4064,6 +4064,7 @@ static PyMethodDef math_methods[] = { static PyModuleDef_Slot math_slots[] = { {Py_mod_exec, math_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/md5module.c b/Modules/md5module.c index 4f7bc77a8836a3..86605771d9643f 100644 --- a/Modules/md5module.c +++ b/Modules/md5module.c @@ -340,6 +340,7 @@ md5_exec(PyObject *m) static PyModuleDef_Slot _md5_slots[] = { {Py_mod_exec, md5_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index fe76ca6eafaa88..a470dd3c2f3bba 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -1713,6 +1713,7 @@ mmap_exec(PyObject *module) static PyModuleDef_Slot mmap_slots[] = { {Py_mod_exec, mmap_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/nismodule.c b/Modules/nismodule.c index ec7f6d8031e84b..6d094490cea731 100644 --- a/Modules/nismodule.c +++ b/Modules/nismodule.c @@ -502,6 +502,9 @@ nis_exec(PyObject *module) static PyModuleDef_Slot nis_slots[] = { {Py_mod_exec, nis_exec}, + // XXX gh-103092: fix isolation. 
+ {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, + //{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/overlapped.c b/Modules/overlapped.c index 02c0f401be4c9e..ac637316583d2d 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -2050,6 +2050,7 @@ overlapped_exec(PyObject *module) static PyModuleDef_Slot overlapped_slots[] = { {Py_mod_exec, overlapped_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index dcb5e7a0e0408c..5022fdeb03703a 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -175,6 +175,14 @@ # define HAVE_PWRITEV_RUNTIME (pwritev != NULL) # endif +# ifdef HAVE_MKFIFOAT +# define HAVE_MKFIFOAT_RUNTIME (mkfifoat != NULL) +# endif + +# ifdef HAVE_MKNODAT +# define HAVE_MKNODAT_RUNTIME (mknodat != NULL) +# endif + #endif #ifdef HAVE_FUTIMESAT @@ -4802,7 +4810,7 @@ os__path_isdir_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.wide) { + if (_path.wide) { if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, &statInfo, sizeof(statInfo))) { if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { @@ -4899,7 +4907,7 @@ os__path_isfile_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.wide) { + if (_path.wide) { if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, &statInfo, sizeof(statInfo))) { if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { @@ -4995,7 +5003,7 @@ os__path_exists_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.wide) { + if (_path.wide) { if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, &statInfo, sizeof(statInfo))) { if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) { @@ -5082,7 +5090,7 @@ os__path_islink_impl(PyObject *module, PyObject *path) } Py_BEGIN_ALLOW_THREADS - if (_path.wide) { + if (_path.wide) { if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo, &statInfo, sizeof(statInfo))) { slow_path = FALSE; @@ -16785,6 +16793,7 @@ posixmodule_exec(PyObject *m) static PyModuleDef_Slot posixmodile_slots[] = { {Py_mod_exec, posixmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/pwdmodule.c b/Modules/pwdmodule.c index a757380bd09f70..cc2e2a43893971 100644 --- a/Modules/pwdmodule.c +++ b/Modules/pwdmodule.c @@ -336,6 +336,7 @@ pwdmodule_exec(PyObject *module) static PyModuleDef_Slot pwdmodule_slots[] = { {Py_mod_exec, pwdmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c index 0a744998b6c514..c0fbd4d39f0096 100644 --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -2056,6 +2056,9 @@ pyexpat_free(void *module) static PyModuleDef_Slot pyexpat_slots[] = { {Py_mod_exec, pyexpat_exec}, + // XXX gh-103092: fix isolation. 
+ {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED}, + //{Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/resource.c b/Modules/resource.c index a97fb870062b82..2a8158c9be5359 100644 --- a/Modules/resource.c +++ b/Modules/resource.c @@ -514,6 +514,7 @@ resource_exec(PyObject *module) static struct PyModuleDef_Slot resource_slots[] = { {Py_mod_exec, resource_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 5a1e40d0b4a482..79bd5b59ab68f9 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -2651,6 +2651,7 @@ _select_exec(PyObject *m) static PyModuleDef_Slot _select_slots[] = { {Py_mod_exec, _select_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/sha1module.c b/Modules/sha1module.c index f8d4056fd34b65..bdb76c56f1a6e8 100644 --- a/Modules/sha1module.c +++ b/Modules/sha1module.c @@ -344,6 +344,7 @@ _sha1_exec(PyObject *module) static PyModuleDef_Slot _sha1_slots[] = { {Py_mod_exec, _sha1_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/sha2module.c b/Modules/sha2module.c index 72de20b44762d7..37d9b5c538fd0b 100644 --- a/Modules/sha2module.c +++ b/Modules/sha2module.c @@ -785,6 +785,7 @@ static int sha2_exec(PyObject *module) static PyModuleDef_Slot _sha2_slots[] = { {Py_mod_exec, sha2_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_sha3/sha3module.c b/Modules/sha3module.c similarity index 75% rename from Modules/_sha3/sha3module.c rename to Modules/sha3module.c index 633a0c0ea08d2a..f05187498a19b3 100644 --- a/Modules/_sha3/sha3module.c +++ b/Modules/sha3module.c @@ -22,23 +22,9 @@ #include "Python.h" #include "pycore_strhex.h" // _Py_strhex() #include "pycore_typeobject.h" // _PyType_GetModuleState() -#include "../hashlib.h" - -#include "sha3.c" +#include "hashlib.h" #define SHA3_MAX_DIGESTSIZE 64 /* 64 Bytes (512 Bits) for 224 to 512 */ -#define SHA3_LANESIZE 0 -#define SHA3_state sha3_ctx_t -#define SHA3_init sha3_init -#define SHA3_process sha3_update -#define SHA3_done(state, digest) sha3_final(digest, state) -#define SHA3_squeeze(state, out, len) shake_xof(state), shake_out(state, out, len) -#define SHA3_copystate(dest, src) memcpy(&(dest), &(src), sizeof(SHA3_state)) - -// no optimization -#define KeccakOpt 0 - -typedef enum { SUCCESS = 1, FAIL = 0, BAD_HASHLEN = 2 } HashReturn; typedef struct { PyTypeObject *sha3_224_type; @@ -70,10 +56,11 @@ class _sha3.shake_256 "SHA3object *" "&SHAKE256type" /* The structure for storing SHA3 info */ +#include "_hacl/Hacl_Hash_SHA3.h" + typedef struct { PyObject_HEAD - SHA3_state hash_state; - PyThread_type_lock lock; + Hacl_Streaming_Keccak_state *hash_state; } SHA3object; #include "clinic/sha3module.c.h" @@ -86,10 +73,23 @@ newSHA3object(PyTypeObject *type) if (newobj == NULL) { return NULL; } - newobj->lock = NULL; return newobj; } +static void sha3_update(Hacl_Streaming_Keccak_state *state, uint8_t *buf, Py_ssize_t len) { + /* Note: we explicitly ignore the error code on the basis that it would take > + * 1 billion years to hash more than 2^64 bytes. 
*/ +#if PY_SSIZE_T_MAX > UINT32_MAX + while (len > UINT32_MAX) { + Hacl_Streaming_Keccak_update(state, buf, UINT32_MAX); + len -= UINT32_MAX; + buf += UINT32_MAX; + } +#endif + /* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */ + Hacl_Streaming_Keccak_update(state, buf, (uint32_t) len); +} + /*[clinic input] @classmethod _sha3.sha3_224.__new__ as py_sha3_new @@ -98,14 +98,13 @@ _sha3.sha3_224.__new__ as py_sha3_new * usedforsecurity: bool = True -Return a new BLAKE2b hash object. +Return a new SHA3 hash object. [clinic start generated code]*/ static PyObject * py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity) -/*[clinic end generated code: output=90409addc5d5e8b0 input=bcfcdf2e4368347a]*/ +/*[clinic end generated code: output=90409addc5d5e8b0 input=637e5f8f6a93982a]*/ { - HashReturn res; Py_buffer buf = {NULL, NULL}; SHA3State *state = _PyType_GetModuleState(type); SHA3object *self = newSHA3object(type); @@ -116,49 +115,29 @@ py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity) assert(state != NULL); if (type == state->sha3_224_type) { - res = sha3_init(&self->hash_state, 28); + self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_224); } else if (type == state->sha3_256_type) { - res = sha3_init(&self->hash_state, 32); + self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_256); } else if (type == state->sha3_384_type) { - res = sha3_init(&self->hash_state, 48); + self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_384); } else if (type == state->sha3_512_type) { - res = sha3_init(&self->hash_state, 64); + self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_512); } else if (type == state->shake_128_type) { - res = sha3_init(&self->hash_state, 16); + self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_Shake128); } else if (type == state->shake_256_type) { - res = sha3_init(&self->hash_state, 32); + self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_Shake256); } else { PyErr_BadInternalCall(); goto error; } - if (res != SUCCESS) { - PyErr_SetString(PyExc_RuntimeError, - "internal error in SHA3 initialize()"); - goto error; - } - if (data) { GET_BUFFER_VIEW_OR_ERROR(data, &buf, goto error); - if (buf.len >= HASHLIB_GIL_MINSIZE) { - /* invariant: New objects can't be accessed by other code yet, - * thus it's safe to release the GIL without locking the object. 
- */ - Py_BEGIN_ALLOW_THREADS - res = SHA3_process(&self->hash_state, buf.buf, buf.len); - Py_END_ALLOW_THREADS - } - else { - res = SHA3_process(&self->hash_state, buf.buf, buf.len); - } - if (res != SUCCESS) { - PyErr_SetString(PyExc_RuntimeError, - "internal error in SHA3 Update()"); - goto error; - } - PyBuffer_Release(&buf); + sha3_update(self->hash_state, buf.buf, buf.len); } + PyBuffer_Release(&buf); + return (PyObject *)self; error: @@ -177,10 +156,7 @@ py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity) static void SHA3_dealloc(SHA3object *self) { - if (self->lock) { - PyThread_free_lock(self->lock); - } - + Hacl_Streaming_Keccak_free(self->hash_state); PyTypeObject *tp = Py_TYPE(self); PyObject_Free(self); Py_DECREF(tp); @@ -205,9 +181,7 @@ _sha3_sha3_224_copy_impl(SHA3object *self) if ((newobj = newSHA3object(Py_TYPE(self))) == NULL) { return NULL; } - ENTER_HASHLIB(self); - SHA3_copystate(newobj->hash_state, self->hash_state); - LEAVE_HASHLIB(self); + newobj->hash_state = Hacl_Streaming_Keccak_copy(self->hash_state); return (PyObject *)newobj; } @@ -222,20 +196,12 @@ static PyObject * _sha3_sha3_224_digest_impl(SHA3object *self) /*[clinic end generated code: output=fd531842e20b2d5b input=5b2a659536bbd248]*/ { - unsigned char digest[SHA3_MAX_DIGESTSIZE + SHA3_LANESIZE]; - SHA3_state temp; - HashReturn res; - - ENTER_HASHLIB(self); - SHA3_copystate(temp, self->hash_state); - LEAVE_HASHLIB(self); - res = SHA3_done(&temp, digest); - if (res != SUCCESS) { - PyErr_SetString(PyExc_RuntimeError, "internal error in SHA3 Final()"); - return NULL; - } + unsigned char digest[SHA3_MAX_DIGESTSIZE]; + // This function errors out if the algorithm is Shake. Here, we know this + // not to be the case, and therefore do not perform error checking. + Hacl_Streaming_Keccak_finish(self->hash_state, digest); return PyBytes_FromStringAndSize((const char *)digest, - self->hash_state.mdlen); + Hacl_Streaming_Keccak_hash_len(self->hash_state)); } @@ -249,21 +215,10 @@ static PyObject * _sha3_sha3_224_hexdigest_impl(SHA3object *self) /*[clinic end generated code: output=75ad03257906918d input=2d91bb6e0d114ee3]*/ { - unsigned char digest[SHA3_MAX_DIGESTSIZE + SHA3_LANESIZE]; - SHA3_state temp; - HashReturn res; - - /* Get the raw (binary) digest value */ - ENTER_HASHLIB(self); - SHA3_copystate(temp, self->hash_state); - LEAVE_HASHLIB(self); - res = SHA3_done(&temp, digest); - if (res != SUCCESS) { - PyErr_SetString(PyExc_RuntimeError, "internal error in SHA3 Final()"); - return NULL; - } + unsigned char digest[SHA3_MAX_DIGESTSIZE]; + Hacl_Streaming_Keccak_finish(self->hash_state, digest); return _Py_strhex((const char *)digest, - self->hash_state.mdlen); + Hacl_Streaming_Keccak_hash_len(self->hash_state)); } @@ -281,36 +236,8 @@ _sha3_sha3_224_update(SHA3object *self, PyObject *data) /*[clinic end generated code: output=d3223352286ed357 input=a887f54dcc4ae227]*/ { Py_buffer buf; - HashReturn res; - GET_BUFFER_VIEW_OR_ERROUT(data, &buf); - - /* add new data, the function takes the length in bits not bytes */ - if (self->lock == NULL && buf.len >= HASHLIB_GIL_MINSIZE) { - self->lock = PyThread_allocate_lock(); - } - /* Once a lock exists all code paths must be synchronized. We have to - * release the GIL even for small buffers as acquiring the lock may take - * an unlimited amount of time when another thread updates this object - * with lots of data. 
*/ - if (self->lock) { - Py_BEGIN_ALLOW_THREADS - PyThread_acquire_lock(self->lock, 1); - res = SHA3_process(&self->hash_state, buf.buf, buf.len); - PyThread_release_lock(self->lock); - Py_END_ALLOW_THREADS - } - else { - res = SHA3_process(&self->hash_state, buf.buf, buf.len); - } - - if (res != SUCCESS) { - PyBuffer_Release(&buf); - PyErr_SetString(PyExc_RuntimeError, - "internal error in SHA3 Update()"); - return NULL; - } - + sha3_update(self->hash_state, buf.buf, buf.len); PyBuffer_Release(&buf); Py_RETURN_NONE; } @@ -328,7 +255,7 @@ static PyMethodDef SHA3_methods[] = { static PyObject * SHA3_get_block_size(SHA3object *self, void *closure) { - int rate = self->hash_state.rsiz; + uint32_t rate = Hacl_Streaming_Keccak_block_len(self->hash_state); return PyLong_FromLong(rate); } @@ -363,14 +290,19 @@ SHA3_get_name(SHA3object *self, void *closure) static PyObject * SHA3_get_digest_size(SHA3object *self, void *closure) { - return PyLong_FromLong(self->hash_state.mdlen); + // Preserving previous behavior: variable-length algorithms return 0 + if (Hacl_Streaming_Keccak_is_shake(self->hash_state)) + return PyLong_FromLong(0); + else + return PyLong_FromLong(Hacl_Streaming_Keccak_hash_len(self->hash_state)); } static PyObject * SHA3_get_capacity_bits(SHA3object *self, void *closure) { - int capacity = 1600 - self->hash_state.rsiz * 8; + uint32_t rate = Hacl_Streaming_Keccak_block_len(self->hash_state) * 8; + int capacity = 1600 - rate; return PyLong_FromLong(capacity); } @@ -378,7 +310,7 @@ SHA3_get_capacity_bits(SHA3object *self, void *closure) static PyObject * SHA3_get_rate_bits(SHA3object *self, void *closure) { - unsigned int rate = self->hash_state.rsiz * 8; + uint32_t rate = Hacl_Streaming_Keccak_block_len(self->hash_state) * 8; return PyLong_FromLong(rate); } @@ -455,28 +387,26 @@ static PyObject * _SHAKE_digest(SHA3object *self, unsigned long digestlen, int hex) { unsigned char *digest = NULL; - SHA3_state temp; PyObject *result = NULL; if (digestlen >= (1 << 29)) { PyErr_SetString(PyExc_ValueError, "length is too large"); return NULL; } - /* ExtractLane needs at least SHA3_MAX_DIGESTSIZE + SHA3_LANESIZE and - * SHA3_LANESIZE extra space. - */ - digest = (unsigned char*)PyMem_Malloc(digestlen + SHA3_LANESIZE); + digest = (unsigned char*)PyMem_Malloc(digestlen); if (digest == NULL) { return PyErr_NoMemory(); } - /* Get the raw (binary) digest value */ - ENTER_HASHLIB(self); - SHA3_copystate(temp, self->hash_state); - LEAVE_HASHLIB(self); - SHA3_squeeze(&temp, digest, digestlen); + /* Get the raw (binary) digest value. 
The HACL functions errors out if: + * - the algorith is not shake -- not the case here + * - the output length is zero -- we follow the existing behavior and return + * an empty digest, without raising an error */ + if (digestlen > 0) { + Hacl_Streaming_Keccak_squeeze(self->hash_state, digest, digestlen); + } if (hex) { - result = _Py_strhex((const char *)digest, digestlen); + result = _Py_strhex((const char *)digest, digestlen); } else { result = PyBytes_FromStringAndSize((const char *)digest, digestlen); @@ -628,11 +558,8 @@ _sha3_exec(PyObject *m) init_sha3type(shake_256_type, SHAKE256_spec); #undef init_sha3type - if (PyModule_AddIntConstant(m, "keccakopt", KeccakOpt) < 0) { - return -1; - } if (PyModule_AddStringConstant(m, "implementation", - "tiny_sha3") < 0) { + "HACL") < 0) { return -1; } @@ -641,6 +568,7 @@ _sha3_exec(PyObject *m) static PyModuleDef_Slot _sha3_slots[] = { {Py_mod_exec, _sha3_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index fdd1450050fa1b..2350236ad46b25 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -1695,6 +1695,7 @@ _signal_module_free(void *module) static PyModuleDef_Slot signal_slots[] = { {Py_mod_exec, signal_module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index f11d4b1a6e0591..c11fb4400eab2f 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -1903,6 +1903,11 @@ getsockaddrarg(PySocketSockObject *s, PyObject *args, /* RDS sockets use sockaddr_in: fall-through */ #endif /* AF_RDS */ +#ifdef AF_DIVERT + case AF_DIVERT: + /* FreeBSD divert(4) sockets use sockaddr_in: fall-through */ +#endif /* AF_DIVERT */ + case AF_INET: { struct maybe_idna host = {NULL, NULL}; @@ -6878,8 +6883,10 @@ socket_getnameinfo(PyObject *self, PyObject *args) } #endif } + Py_BEGIN_ALLOW_THREADS error = getnameinfo(res->ai_addr, (socklen_t) res->ai_addrlen, hbuf, sizeof(hbuf), pbuf, sizeof(pbuf), flags); + Py_END_ALLOW_THREADS if (error) { socket_state *state = get_module_state(self); set_gaierror(state, error); @@ -7683,6 +7690,14 @@ socket_exec(PyObject *m) ADD_INT_MACRO(m, AF_SYSTEM); #endif +/* FreeBSD divert(4) */ +#ifdef PF_DIVERT + PyModule_AddIntMacro(m, PF_DIVERT); +#endif +#ifdef AF_DIVERT + PyModule_AddIntMacro(m, AF_DIVERT); +#endif + #ifdef AF_PACKET ADD_INT_MACRO(m, AF_PACKET); #endif @@ -8857,6 +8872,7 @@ socket_exec(PyObject *m) static struct PyModuleDef_Slot socket_slots[] = { {Py_mod_exec, socket_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/spwdmodule.c b/Modules/spwdmodule.c index 42123c93b59365..13f1115feefa86 100644 --- a/Modules/spwdmodule.c +++ b/Modules/spwdmodule.c @@ -224,6 +224,7 @@ spwdmodule_exec(PyObject *module) static PyModuleDef_Slot spwdmodule_slots[] = { {Py_mod_exec, spwdmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/symtablemodule.c b/Modules/symtablemodule.c index 91538b4fb15cbd..1cc319cc3410d8 100644 --- a/Modules/symtablemodule.c +++ b/Modules/symtablemodule.c @@ -100,6 +100,7 @@ symtable_init_constants(PyObject *m) static PyModuleDef_Slot symtable_slots[] = { {Py_mod_exec, symtable_init_constants}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c index 
f45aa5227f1cbf..6db8de9c491dd9 100644 --- a/Modules/syslogmodule.c +++ b/Modules/syslogmodule.c @@ -406,6 +406,7 @@ syslog_exec(PyObject *module) static PyModuleDef_Slot syslog_slots[] = { {Py_mod_exec, syslog_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/termios.c b/Modules/termios.c index fcc8f042679870..6dc8200572bc0c 100644 --- a/Modules/termios.c +++ b/Modules/termios.c @@ -85,7 +85,7 @@ termios_tcgetattr_impl(PyObject *module, int fd) int r; Py_BEGIN_ALLOW_THREADS - r = tcgetattr(fd, &mode); + r = tcgetattr(fd, &mode); Py_END_ALLOW_THREADS if (r == -1) { return PyErr_SetFromErrno(state->TermiosError); @@ -372,7 +372,7 @@ termios_tcgetwinsize_impl(PyObject *module, int fd) #if defined(TIOCGWINSZ) termiosmodulestate *state = PyModule_GetState(module); struct winsize w; - int r; + int r; Py_BEGIN_ALLOW_THREADS r = ioctl(fd, TIOCGWINSZ, &w); @@ -1253,6 +1253,7 @@ termios_exec(PyObject *mod) static PyModuleDef_Slot termios_slots[] = { {Py_mod_exec, termios_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/timemodule.c b/Modules/timemodule.c index c50e689bb6986c..3607855dbd8f27 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -2107,6 +2107,7 @@ time_module_free(void *module) static struct PyModuleDef_Slot time_slots[] = { {Py_mod_exec, time_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c index c108f14871f946..41dcd5f8f883f2 100644 --- a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -1516,6 +1516,7 @@ unicodedata_exec(PyObject *module) static PyModuleDef_Slot unicodedata_slots[] = { {Py_mod_exec, unicodedata_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/xxlimited.c b/Modules/xxlimited.c index 5f5297ba6337af..3935c00fc26530 100644 --- a/Modules/xxlimited.c +++ b/Modules/xxlimited.c @@ -390,6 +390,7 @@ xx_modexec(PyObject *m) static PyModuleDef_Slot xx_slots[] = { {Py_mod_exec, xx_modexec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/xxlimited_35.c b/Modules/xxlimited_35.c index 361c7e76d77f50..1ff3ef1cb6f296 100644 --- a/Modules/xxlimited_35.c +++ b/Modules/xxlimited_35.c @@ -293,6 +293,7 @@ xx_modexec(PyObject *m) static PyModuleDef_Slot xx_slots[] = { {Py_mod_exec, xx_modexec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/xxmodule.c b/Modules/xxmodule.c index a676fdb4ec773a..1e4e0ea3743ce3 100644 --- a/Modules/xxmodule.c +++ b/Modules/xxmodule.c @@ -383,6 +383,7 @@ xx_exec(PyObject *m) static struct PyModuleDef_Slot xx_slots[] = { {Py_mod_exec, xx_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/xxsubtype.c b/Modules/xxsubtype.c index 8512baf7cd0a2d..744ba7bf5d28b6 100644 --- a/Modules/xxsubtype.c +++ b/Modules/xxsubtype.c @@ -286,6 +286,7 @@ xxsubtype_exec(PyObject* m) static struct PyModuleDef_Slot xxsubtype_slots[] = { {Py_mod_exec, xxsubtype_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c index e2f7dbaca87a9f..b67844a67c315c 100644 --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -2109,6 +2109,7 @@ zlib_exec(PyObject *mod) static PyModuleDef_Slot zlib_slots[] = { {Py_mod_exec, 
zlib_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Objects/boolobject.c b/Objects/boolobject.c index 597a76fa5cb162..0300f7bb4e3dc0 100644 --- a/Objects/boolobject.c +++ b/Objects/boolobject.c @@ -73,6 +73,22 @@ bool_vectorcall(PyObject *type, PyObject * const*args, /* Arithmetic operations redefined to return bool if both args are bool. */ +static PyObject * +bool_invert(PyObject *v) +{ + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Bitwise inversion '~' on bool is deprecated. This " + "returns the bitwise inversion of the underlying int " + "object and is usually not what you expect from negating " + "a bool. Use the 'not' operator for boolean negation or " + "~int(x) if you really want the bitwise inversion of the " + "underlying int.", + 1) < 0) { + return NULL; + } + return PyLong_Type.tp_as_number->nb_invert(v); +} + static PyObject * bool_and(PyObject *a, PyObject *b) { @@ -119,7 +135,7 @@ static PyNumberMethods bool_as_number = { 0, /* nb_positive */ 0, /* nb_absolute */ 0, /* nb_bool */ - 0, /* nb_invert */ + (unaryfunc)bool_invert, /* nb_invert */ 0, /* nb_lshift */ 0, /* nb_rshift */ bool_and, /* nb_and */ diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index 49d4dd524696a5..c36db59baaa10d 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -61,6 +61,7 @@ static void bytearray_releasebuffer(PyByteArrayObject *obj, Py_buffer *view) { obj->ob_exports--; + assert(obj->ob_exports >= 0); } static int diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 27b2ad4f2cb38f..abbf3eeb16c35c 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -423,9 +423,6 @@ formatfloat(PyObject *v, int flags, int prec, int type, if (flags & F_ALT) { dtoa_flags |= Py_DTSF_ALT; } - if (flags & F_NO_NEG_0) { - dtoa_flags |= Py_DTSF_NO_NEG_0; - } p = PyOS_double_to_string(x, type, prec, dtoa_flags, NULL); if (p == NULL) @@ -705,7 +702,6 @@ _PyBytes_FormatEx(const char *format, Py_ssize_t format_len, case ' ': flags |= F_BLANK; continue; case '#': flags |= F_ALT; continue; case '0': flags |= F_ZERO; continue; - case 'z': flags |= F_NO_NEG_0; continue; } break; } diff --git a/Objects/call.c b/Objects/call.c index cf6e357a990441..0d548dcd5e1aed 100644 --- a/Objects/call.c +++ b/Objects/call.c @@ -157,6 +157,42 @@ PyObject_VectorcallDict(PyObject *callable, PyObject *const *args, return _PyObject_FastCallDictTstate(tstate, callable, args, nargsf, kwargs); } +static void +object_is_not_callable(PyThreadState *tstate, PyObject *callable) +{ + if (Py_IS_TYPE(callable, &PyModule_Type)) { + // >>> import pprint + // >>> pprint(thing) + // Traceback (most recent call last): + // File "", line 1, in + // TypeError: 'module' object is not callable. Did you mean: 'pprint.pprint(...)'? + PyObject *name = PyModule_GetNameObject(callable); + if (name == NULL) { + _PyErr_Clear(tstate); + goto basic_type_error; + } + PyObject *attr; + int res = _PyObject_LookupAttr(callable, name, &attr); + if (res < 0) { + _PyErr_Clear(tstate); + } + else if (res > 0 && PyCallable_Check(attr)) { + _PyErr_Format(tstate, PyExc_TypeError, + "'%.200s' object is not callable. 
" + "Did you mean: '%U.%U(...)'?", + Py_TYPE(callable)->tp_name, name, name); + Py_DECREF(attr); + Py_DECREF(name); + return; + } + Py_XDECREF(attr); + Py_DECREF(name); + } +basic_type_error: + _PyErr_Format(tstate, PyExc_TypeError, "'%.200s' object is not callable", + Py_TYPE(callable)->tp_name); +} + PyObject * _PyObject_MakeTpCall(PyThreadState *tstate, PyObject *callable, @@ -171,9 +207,7 @@ _PyObject_MakeTpCall(PyThreadState *tstate, PyObject *callable, * temporary dictionary for keyword arguments (if any) */ ternaryfunc call = Py_TYPE(callable)->tp_call; if (call == NULL) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object is not callable", - Py_TYPE(callable)->tp_name); + object_is_not_callable(tstate, callable); return NULL; } @@ -322,9 +356,7 @@ _PyObject_Call(PyThreadState *tstate, PyObject *callable, else { call = Py_TYPE(callable)->tp_call; if (call == NULL) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object is not callable", - Py_TYPE(callable)->tp_name); + object_is_not_callable(tstate, callable); return NULL; } diff --git a/Objects/clinic/memoryobject.c.h b/Objects/clinic/memoryobject.c.h index ff7b50bb114b05..25a22341185903 100644 --- a/Objects/clinic/memoryobject.c.h +++ b/Objects/clinic/memoryobject.c.h @@ -62,6 +62,66 @@ memoryview(PyTypeObject *type, PyObject *args, PyObject *kwargs) return return_value; } +PyDoc_STRVAR(memoryview__from_flags__doc__, +"_from_flags($type, /, object, flags)\n" +"--\n" +"\n" +"Create a new memoryview object which references the given object."); + +#define MEMORYVIEW__FROM_FLAGS_METHODDEF \ + {"_from_flags", _PyCFunction_CAST(memoryview__from_flags), METH_FASTCALL|METH_KEYWORDS|METH_CLASS, memoryview__from_flags__doc__}, + +static PyObject * +memoryview__from_flags_impl(PyTypeObject *type, PyObject *object, int flags); + +static PyObject * +memoryview__from_flags(PyTypeObject *type, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(object), &_Py_ID(flags), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"object", "flags", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "_from_flags", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *object; + int flags; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + object = args[0]; + flags = _PyLong_AsInt(args[1]); + if (flags == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = memoryview__from_flags_impl(type, object, flags); + +exit: + return return_value; +} + PyDoc_STRVAR(memoryview_release__doc__, "release($self, /)\n" "--\n" @@ -356,4 +416,4 @@ memoryview_hex(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs exit: return return_value; } -/*[clinic end generated code: output=a832f2fc44e4794c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=01613814112cedd7 input=a9049054013a1b77]*/ diff --git a/Objects/descrobject.c b/Objects/descrobject.c index 334be75e8df9df..17c0c85a06c4b8 100644 --- a/Objects/descrobject.c +++ 
b/Objects/descrobject.c @@ -978,6 +978,12 @@ PyDescr_NewMember(PyTypeObject *type, PyMemberDef *member) { PyMemberDescrObject *descr; + if (member->flags & Py_RELATIVE_OFFSET) { + PyErr_SetString( + PyExc_SystemError, + "PyDescr_NewMember used with Py_RELATIVE_OFFSET"); + return NULL; + } descr = (PyMemberDescrObject *)descr_new(&PyMemberDescr_Type, type, member->name); if (descr != NULL) diff --git a/Objects/exceptions.c b/Objects/exceptions.c index 6c9dfbd9b415cf..ba5ee291f08b0c 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -3598,7 +3598,7 @@ _PyExc_InitTypes(PyInterpreterState *interp) { for (size_t i=0; i < Py_ARRAY_LENGTH(static_exceptions); i++) { PyTypeObject *exc = static_exceptions[i].exc; - if (_PyStaticType_InitBuiltin(exc) < 0) { + if (_PyStaticType_InitBuiltin(interp, exc) < 0) { return -1; } } @@ -3609,13 +3609,9 @@ _PyExc_InitTypes(PyInterpreterState *interp) static void _PyExc_FiniTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - for (Py_ssize_t i=Py_ARRAY_LENGTH(static_exceptions) - 1; i >= 0; i--) { PyTypeObject *exc = static_exceptions[i].exc; - _PyStaticType_Dealloc(exc); + _PyStaticType_Dealloc(interp, exc); } } diff --git a/Objects/floatobject.c b/Objects/floatobject.c index 9c2315781bed36..d257857d9c619c 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -12,7 +12,7 @@ #include "pycore_object.h" // _PyObject_Init() #include "pycore_pymath.h" // _PY_SHORT_FLOAT_REPR #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "pycore_structseq.h" // _PyStructSequence_FiniType() +#include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin() #include #include @@ -1991,8 +1991,9 @@ PyStatus _PyFloat_InitTypes(PyInterpreterState *interp) { /* Init float info */ - if (_PyStructSequence_InitBuiltin(&FloatInfoType, - &floatinfo_desc) < 0) { + if (_PyStructSequence_InitBuiltin(interp, &FloatInfoType, + &floatinfo_desc) < 0) + { return _PyStatus_ERR("can't init float info type"); } @@ -2028,9 +2029,7 @@ _PyFloat_Fini(PyInterpreterState *interp) void _PyFloat_FiniType(PyInterpreterState *interp) { - if (_Py_IsMainInterpreter(interp)) { - _PyStructSequence_FiniType(&FloatInfoType); - } + _PyStructSequence_FiniBuiltin(interp, &FloatInfoType); } /* Print summary info about the state of the optimized allocator */ diff --git a/Objects/frameobject.c b/Objects/frameobject.c index ef0070199ab2c0..d9aaea7831a380 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -38,7 +38,7 @@ PyFrame_GetLineNumber(PyFrameObject *f) return f->f_lineno; } else { - return _PyInterpreterFrame_GetLine(f->f_frame); + return PyUnstable_InterpreterFrame_GetLine(f->f_frame); } } @@ -1224,6 +1224,10 @@ _PyFrame_FastToLocalsWithError(_PyInterpreterFrame *frame) } PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); + _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i); + if (kind & CO_FAST_HIDDEN) { + continue; + } if (value == NULL) { if (PyObject_DelItem(locals, name) != 0) { if (PyErr_ExceptionMatches(PyExc_KeyError)) { diff --git a/Objects/genobject.c b/Objects/genobject.c index 6316fa9865fe65..937d497753e970 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -12,6 +12,7 @@ #include "pycore_pystate.h" // _PyThreadState_GET() #include "structmember.h" // PyMemberDef #include "opcode.h" // SEND +#include "frameobject.h" // _PyInterpreterFrame_GetLine #include "pystats.h" static PyObject *gen_close(PyGenObject *, PyObject *); @@ -1322,7 +1323,7 @@ compute_cr_origin(int 
origin_depth, _PyInterpreterFrame *current_frame) frame = current_frame; for (int i = 0; i < frame_count; ++i) { PyCodeObject *code = frame->f_code; - int line = _PyInterpreterFrame_GetLine(frame); + int line = PyUnstable_InterpreterFrame_GetLine(frame); PyObject *frameinfo = Py_BuildValue("OiO", code->co_filename, line, code->co_name); if (!frameinfo) { diff --git a/Objects/longobject.c b/Objects/longobject.c index f84809b8a8986a..853e934e2107ea 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -7,9 +7,8 @@ #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_long.h" // _Py_SmallInts #include "pycore_object.h" // _PyObject_Init() -#include "pycore_pystate.h" // _Py_IsMainInterpreter() #include "pycore_runtime.h" // _PY_NSMALLPOSINTS -#include "pycore_structseq.h" // _PyStructSequence_FiniType() +#include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin() #include #include @@ -6352,7 +6351,9 @@ PyStatus _PyLong_InitTypes(PyInterpreterState *interp) { /* initialize int_info */ - if (_PyStructSequence_InitBuiltin(&Int_InfoType, &int_info_desc) < 0) { + if (_PyStructSequence_InitBuiltin(interp, &Int_InfoType, + &int_info_desc) < 0) + { return _PyStatus_ERR("can't init int info type"); } @@ -6363,9 +6364,5 @@ _PyLong_InitTypes(PyInterpreterState *interp) void _PyLong_FiniTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - - _PyStructSequence_FiniType(&Int_InfoType); + _PyStructSequence_FiniBuiltin(interp, &Int_InfoType); } diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index 34cc797b404cda..b0168044d9f85a 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -85,7 +85,7 @@ mbuf_alloc(void) } static PyObject * -_PyManagedBuffer_FromObject(PyObject *base) +_PyManagedBuffer_FromObject(PyObject *base, int flags) { _PyManagedBufferObject *mbuf; @@ -93,7 +93,7 @@ _PyManagedBuffer_FromObject(PyObject *base) if (mbuf == NULL) return NULL; - if (PyObject_GetBuffer(base, &mbuf->master, PyBUF_FULL_RO) < 0) { + if (PyObject_GetBuffer(base, &mbuf->master, flags) < 0) { mbuf->master.obj = NULL; Py_DECREF(mbuf); return NULL; @@ -193,6 +193,20 @@ PyTypeObject _PyManagedBuffer_Type = { return -1; \ } +#define CHECK_RESTRICTED(mv) \ + if (((PyMemoryViewObject *)(mv))->flags & _Py_MEMORYVIEW_RESTRICTED) { \ + PyErr_SetString(PyExc_ValueError, \ + "cannot create new view on restricted memoryview"); \ + return NULL; \ + } + +#define CHECK_RESTRICTED_INT(mv) \ + if (((PyMemoryViewObject *)(mv))->flags & _Py_MEMORYVIEW_RESTRICTED) { \ + PyErr_SetString(PyExc_ValueError, \ + "cannot create new view on restricted memoryview"); \ + return -1; \ + } + /* See gh-92888. These macros signal that we need to check the memoryview again due to possible read after frees. */ #define CHECK_RELEASED_AGAIN(mv) CHECK_RELEASED(mv) @@ -777,22 +791,24 @@ PyMemoryView_FromBuffer(const Py_buffer *info) return mv; } -/* Create a memoryview from an object that implements the buffer protocol. +/* Create a memoryview from an object that implements the buffer protocol, + using the given flags. If the object is a memoryview, the new memoryview must be registered with the same managed buffer. Otherwise, a new managed buffer is created. 
*/ -PyObject * -PyMemoryView_FromObject(PyObject *v) +static PyObject * +PyMemoryView_FromObjectAndFlags(PyObject *v, int flags) { _PyManagedBufferObject *mbuf; if (PyMemoryView_Check(v)) { PyMemoryViewObject *mv = (PyMemoryViewObject *)v; CHECK_RELEASED(mv); + CHECK_RESTRICTED(mv); return mbuf_add_view(mv->mbuf, &mv->view); } else if (PyObject_CheckBuffer(v)) { PyObject *ret; - mbuf = (_PyManagedBufferObject *)_PyManagedBuffer_FromObject(v); + mbuf = (_PyManagedBufferObject *)_PyManagedBuffer_FromObject(v, flags); if (mbuf == NULL) return NULL; ret = mbuf_add_view(mbuf, NULL); @@ -806,6 +822,38 @@ PyMemoryView_FromObject(PyObject *v) return NULL; } +/* Create a memoryview from an object that implements the buffer protocol, + using the given flags. + If the object is a memoryview, the new memoryview must be registered + with the same managed buffer. Otherwise, a new managed buffer is created. */ +PyObject * +_PyMemoryView_FromBufferProc(PyObject *v, int flags, getbufferproc bufferproc) +{ + _PyManagedBufferObject *mbuf = mbuf_alloc(); + if (mbuf == NULL) + return NULL; + + int res = bufferproc(v, &mbuf->master, flags); + if (res < 0) { + mbuf->master.obj = NULL; + Py_DECREF(mbuf); + return NULL; + } + + PyObject *ret = mbuf_add_view(mbuf, NULL); + Py_DECREF(mbuf); + return ret; +} + +/* Create a memoryview from an object that implements the buffer protocol. + If the object is a memoryview, the new memoryview must be registered + with the same managed buffer. Otherwise, a new managed buffer is created. */ +PyObject * +PyMemoryView_FromObject(PyObject *v) +{ + return PyMemoryView_FromObjectAndFlags(v, PyBUF_FULL_RO); +} + /* Copy the format string from a base object that might vanish. */ static int mbuf_copy_format(_PyManagedBufferObject *mbuf, const char *fmt) @@ -851,7 +899,7 @@ memory_from_contiguous_copy(const Py_buffer *src, char order) if (bytes == NULL) return NULL; - mbuf = (_PyManagedBufferObject *)_PyManagedBuffer_FromObject(bytes); + mbuf = (_PyManagedBufferObject *)_PyManagedBuffer_FromObject(bytes, PyBUF_FULL_RO); Py_DECREF(bytes); if (mbuf == NULL) return NULL; @@ -968,6 +1016,24 @@ memoryview_impl(PyTypeObject *type, PyObject *object) } +/*[clinic input] +@classmethod +memoryview._from_flags + + object: object + flags: int + +Create a new memoryview object which references the given object. +[clinic start generated code]*/ + +static PyObject * +memoryview__from_flags_impl(PyTypeObject *type, PyObject *object, int flags) +/*[clinic end generated code: output=bf71f9906c266ee2 input=f5f82fd0e744356b]*/ +{ + return PyMemoryView_FromObjectAndFlags(object, flags); +} + + /****************************************************************************/ /* Previously in abstract.c */ /****************************************************************************/ @@ -1370,6 +1436,7 @@ memoryview_cast_impl(PyMemoryViewObject *self, PyObject *format, Py_ssize_t ndim = 1; CHECK_RELEASED(self); + CHECK_RESTRICTED(self); if (!MV_C_CONTIGUOUS(self->flags)) { PyErr_SetString(PyExc_TypeError, @@ -1425,6 +1492,7 @@ memoryview_toreadonly_impl(PyMemoryViewObject *self) /*[clinic end generated code: output=2c7e056f04c99e62 input=dc06d20f19ba236f]*/ { CHECK_RELEASED(self); + CHECK_RESTRICTED(self); /* Even if self is already readonly, we still need to create a new * object for .release() to work correctly. 
*/ @@ -1447,6 +1515,7 @@ memory_getbuf(PyMemoryViewObject *self, Py_buffer *view, int flags) int baseflags = self->flags; CHECK_RELEASED_INT(self); + CHECK_RESTRICTED_INT(self); /* start with complete information */ *view = *base; @@ -2508,6 +2577,7 @@ memory_subscript(PyMemoryViewObject *self, PyObject *key) return memory_item(self, index); } else if (PySlice_Check(key)) { + CHECK_RESTRICTED(self); PyMemoryViewObject *sliced; sliced = (PyMemoryViewObject *)mbuf_add_view(self->mbuf, view); @@ -3184,6 +3254,7 @@ static PyMethodDef memory_methods[] = { MEMORYVIEW_TOLIST_METHODDEF MEMORYVIEW_CAST_METHODDEF MEMORYVIEW_TOREADONLY_METHODDEF + MEMORYVIEW__FROM_FLAGS_METHODDEF {"__enter__", memory_enter, METH_NOARGS, NULL}, {"__exit__", memory_exit, METH_VARARGS, NULL}, {NULL, NULL} diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index a0be19a3ca8ac8..985be58d02c784 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -245,9 +245,12 @@ PyModule_FromDefAndSpec2(PyModuleDef* def, PyObject *spec, int module_api_versio PyObject *(*create)(PyObject *, PyModuleDef*) = NULL; PyObject *nameobj; PyObject *m = NULL; + int has_multiple_interpreters_slot = 0; + void *multiple_interpreters = (void *)0; int has_execution_slots = 0; const char *name; int ret; + PyInterpreterState *interp = _PyInterpreterState_GET(); PyModuleDef_Init(def); @@ -287,6 +290,17 @@ PyModule_FromDefAndSpec2(PyModuleDef* def, PyObject *spec, int module_api_versio case Py_mod_exec: has_execution_slots = 1; break; + case Py_mod_multiple_interpreters: + if (has_multiple_interpreters_slot) { + PyErr_Format( + PyExc_SystemError, + "module %s has more than one 'multiple interpreters' slots", + name); + goto error; + } + multiple_interpreters = cur_slot->value; + has_multiple_interpreters_slot = 1; + break; default: assert(cur_slot->slot < 0 || cur_slot->slot > _Py_mod_LAST_SLOT); PyErr_Format( @@ -297,6 +311,26 @@ PyModule_FromDefAndSpec2(PyModuleDef* def, PyObject *spec, int module_api_versio } } + /* By default, multi-phase init modules are expected + to work under multiple interpreters. 
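Editorial sketch, not part of the patch: the Py_mod_multiple_interpreters slot handled above is declared by an extension module in its slot table. Assuming the usual multi-phase init layout (names here are illustrative), this is roughly what opting in looks like:

/* Sketch only: an extension opting in to per-interpreter GIL support. */
static int
mymod_exec(PyObject *module)
{
    return 0;  /* per-interpreter initialization goes here */
}

static PyModuleDef_Slot mymod_slots[] = {
    {Py_mod_exec, mymod_exec},
    /* The other values seen in the hunk above are
       Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED (the default when the slot
       is absent) and Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED. */
    {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED},
    {0, NULL}
};

static struct PyModuleDef mymod_def = {
    PyModuleDef_HEAD_INIT,
    .m_name = "mymod",
    .m_size = 0,
    .m_slots = mymod_slots,
};

PyMODINIT_FUNC
PyInit_mymod(void)
{
    return PyModuleDef_Init(&mymod_def);
}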
*/ + if (!has_multiple_interpreters_slot) { + multiple_interpreters = Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED; + } + if (multiple_interpreters == Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED) { + if (!_Py_IsMainInterpreter(interp) + && _PyImport_CheckSubinterpIncompatibleExtensionAllowed(name) < 0) + { + goto error; + } + } + else if (multiple_interpreters != Py_MOD_PER_INTERPRETER_GIL_SUPPORTED + && interp->ceval.own_gil + && !_Py_IsMainInterpreter(interp) + && _PyImport_CheckSubinterpIncompatibleExtensionAllowed(name) < 0) + { + goto error; + } + if (create) { m = create(spec, def); if (m == NULL) { @@ -421,6 +455,9 @@ PyModule_ExecDef(PyObject *module, PyModuleDef *def) return -1; } break; + case Py_mod_multiple_interpreters: + /* handled in PyModule_FromDefAndSpec2 */ + break; default: PyErr_Format( PyExc_SystemError, @@ -702,7 +739,11 @@ int _PyModuleSpec_IsInitializing(PyObject *spec) { if (spec != NULL) { - PyObject *value = PyObject_GetAttr(spec, &_Py_ID(_initializing)); + PyObject *value; + int ok = _PyObject_LookupAttr(spec, &_Py_ID(_initializing), &value); + if (ok == 0) { + return 0; + } if (value != NULL) { int initializing = PyObject_IsTrue(value); Py_DECREF(value); @@ -738,19 +779,37 @@ _PyModuleSpec_IsUninitializedSubmodule(PyObject *spec, PyObject *name) return is_uninitialized; } -static PyObject* -module_getattro(PyModuleObject *m, PyObject *name) +PyObject* +_Py_module_getattro_impl(PyModuleObject *m, PyObject *name, int suppress) { + // When suppress=1, this function suppresses AttributeError. PyObject *attr, *mod_name, *getattr; - attr = PyObject_GenericGetAttr((PyObject *)m, name); - if (attr || !PyErr_ExceptionMatches(PyExc_AttributeError)) { + attr = _PyObject_GenericGetAttrWithDict((PyObject *)m, name, NULL, suppress); + if (attr) { return attr; } - PyErr_Clear(); + if (suppress == 1) { + if (PyErr_Occurred()) { + // pass up non-AttributeError exception + return NULL; + } + } + else { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { + // pass up non-AttributeError exception + return NULL; + } + PyErr_Clear(); + } assert(m->md_dict != NULL); getattr = PyDict_GetItemWithError(m->md_dict, &_Py_ID(__getattr__)); if (getattr) { - return PyObject_CallOneArg(getattr, name); + PyObject *result = PyObject_CallOneArg(getattr, name); + if (result == NULL && suppress == 1 && PyErr_ExceptionMatches(PyExc_AttributeError)) { + // suppress AttributeError + PyErr_Clear(); + } + return result; } if (PyErr_Occurred()) { return NULL; @@ -763,37 +822,48 @@ module_getattro(PyModuleObject *m, PyObject *name) Py_DECREF(mod_name); return NULL; } - Py_XINCREF(spec); - if (_PyModuleSpec_IsInitializing(spec)) { - PyErr_Format(PyExc_AttributeError, - "partially initialized " - "module '%U' has no attribute '%U' " - "(most likely due to a circular import)", - mod_name, name); - } - else if (_PyModuleSpec_IsUninitializedSubmodule(spec, name)) { - PyErr_Format(PyExc_AttributeError, - "cannot access submodule '%U' of module '%U' " - "(most likely due to a circular import)", - name, mod_name); - } - else { - PyErr_Format(PyExc_AttributeError, - "module '%U' has no attribute '%U'", - mod_name, name); + if (suppress != 1) { + Py_XINCREF(spec); + if (_PyModuleSpec_IsInitializing(spec)) { + PyErr_Format(PyExc_AttributeError, + "partially initialized " + "module '%U' has no attribute '%U' " + "(most likely due to a circular import)", + mod_name, name); + } + else if (_PyModuleSpec_IsUninitializedSubmodule(spec, name)) { + PyErr_Format(PyExc_AttributeError, + "cannot access submodule '%U' of module 
'%U' " + "(most likely due to a circular import)", + name, mod_name); + } + else { + PyErr_Format(PyExc_AttributeError, + "module '%U' has no attribute '%U'", + mod_name, name); + } + Py_XDECREF(spec); } - Py_XDECREF(spec); Py_DECREF(mod_name); return NULL; } else if (PyErr_Occurred()) { return NULL; } - PyErr_Format(PyExc_AttributeError, - "module has no attribute '%U'", name); + if (suppress != 1) { + PyErr_Format(PyExc_AttributeError, + "module has no attribute '%U'", name); + } return NULL; } + +PyObject* +_Py_module_getattro(PyModuleObject *m, PyObject *name) +{ + return _Py_module_getattro_impl(m, name, 0); +} + static int module_traverse(PyModuleObject *m, visitproc visit, void *arg) { @@ -951,7 +1021,7 @@ PyTypeObject PyModule_Type = { 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ - (getattrofunc)module_getattro, /* tp_getattro */ + (getattrofunc)_Py_module_getattro, /* tp_getattro */ PyObject_GenericSetAttr, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | diff --git a/Objects/object.c b/Objects/object.c index 4ce10cf1192d3f..a7c79c673d5fd3 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -14,6 +14,7 @@ #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_symtable.h" // PySTEntry_Type +#include "pycore_typeobject.h" // _PyBufferWrapper_Type #include "pycore_unionobject.h" // _PyUnion_Type #include "pycore_interpreteridobject.h" // _PyInterpreterID_Type @@ -918,13 +919,24 @@ PyObject_GetAttrString(PyObject *v, const char *name) int PyObject_HasAttrString(PyObject *v, const char *name) { - PyObject *res = PyObject_GetAttrString(v, name); - if (res != NULL) { - Py_DECREF(res); - return 1; + if (Py_TYPE(v)->tp_getattr != NULL) { + PyObject *res = (*Py_TYPE(v)->tp_getattr)(v, (char*)name); + if (res != NULL) { + Py_DECREF(res); + return 1; + } + PyErr_Clear(); + return 0; } - PyErr_Clear(); - return 0; + + PyObject *attr_name = PyUnicode_FromString(name); + if (attr_name == NULL) { + PyErr_Clear(); + return 0; + } + int ok = PyObject_HasAttr(v, attr_name); + Py_DECREF(attr_name); + return ok; } int @@ -1074,6 +1086,17 @@ _PyObject_LookupAttr(PyObject *v, PyObject *name, PyObject **result) return 0; } } + else if (tp->tp_getattro == (getattrofunc)_Py_module_getattro) { + // optimization: suppress attribute error from module getattro method + *result = _Py_module_getattro_impl((PyModuleObject*)v, name, 1); + if (*result != NULL) { + return 1; + } + if (PyErr_Occurred()) { + return -1; + } + return 0; + } else if (tp->tp_getattro != NULL) { *result = (*tp->tp_getattro)(v, name); } @@ -2062,6 +2085,7 @@ static PyTypeObject* static_types[] = { &_PyAsyncGenASend_Type, &_PyAsyncGenAThrow_Type, &_PyAsyncGenWrappedValue_Type, + &_PyBufferWrapper_Type, &_PyContextTokenMissing_Type, &_PyCoroWrapper_Type, &_Py_GenericAliasIterType, @@ -2105,7 +2129,7 @@ _PyTypes_InitTypes(PyInterpreterState *interp) // All other static types (unless initialized elsewhere) for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) { PyTypeObject *type = static_types[i]; - if (_PyStaticType_InitBuiltin(type) < 0) { + if (_PyStaticType_InitBuiltin(interp, type) < 0) { return _PyStatus_ERR("Can't initialize builtin type"); } if (type == &PyType_Type) { @@ -2128,15 +2152,11 @@ _PyTypes_InitTypes(PyInterpreterState *interp) void _PyTypes_FiniTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - // Deallocate types in the reverse order to deallocate subclasses before // their 
base classes. for (Py_ssize_t i=Py_ARRAY_LENGTH(static_types)-1; i>=0; i--) { PyTypeObject *type = static_types[i]; - _PyStaticType_Dealloc(type); + _PyStaticType_Dealloc(interp, type); } } diff --git a/Objects/structseq.c b/Objects/structseq.c index 88a71bc52958f5..8b1895957101a4 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -26,7 +26,7 @@ const char * const PyStructSequence_UnnamedField = "unnamed field"; static Py_ssize_t get_type_attr_as_size(PyTypeObject *tp, PyObject *name) { - PyObject *v = PyDict_GetItemWithError(tp->tp_dict, name); + PyObject *v = PyDict_GetItemWithError(_PyType_GetDict(tp), name); if (v == NULL && !PyErr_Occurred()) { PyErr_Format(PyExc_TypeError, "Missed attribute '%U' of type %s", @@ -433,12 +433,10 @@ initialize_structseq_dict(PyStructSequence_Desc *desc, PyObject* dict, static PyMemberDef * initialize_members(PyStructSequence_Desc *desc, - Py_ssize_t *pn_members, Py_ssize_t *pn_unnamed_members) + Py_ssize_t n_members, Py_ssize_t n_unnamed_members) { PyMemberDef *members; - Py_ssize_t n_members, n_unnamed_members; - n_members = count_members(desc, &n_unnamed_members); members = PyMem_NEW(PyMemberDef, n_members - n_unnamed_members + 1); if (members == NULL) { PyErr_NoMemory(); @@ -463,8 +461,6 @@ initialize_members(PyStructSequence_Desc *desc, } members[k].name = NULL; - *pn_members = n_members; - *pn_unnamed_members = n_unnamed_members; return members; } @@ -497,7 +493,7 @@ initialize_static_type(PyTypeObject *type, PyStructSequence_Desc *desc, Py_INCREF(type); if (initialize_structseq_dict( - desc, type->tp_dict, n_members, n_unnamed_members) < 0) { + desc, _PyType_GetDict(type), n_members, n_unnamed_members) < 0) { Py_DECREF(type); return -1; } @@ -506,43 +502,58 @@ initialize_static_type(PyTypeObject *type, PyStructSequence_Desc *desc, } int -_PyStructSequence_InitBuiltinWithFlags(PyTypeObject *type, +_PyStructSequence_InitBuiltinWithFlags(PyInterpreterState *interp, + PyTypeObject *type, PyStructSequence_Desc *desc, unsigned long tp_flags) { - if (type->tp_flags & Py_TPFLAGS_READY) { - if (_PyStaticType_InitBuiltin(type) < 0) { - goto failed_init_builtin; - } - return 0; - } + Py_ssize_t n_unnamed_members; + Py_ssize_t n_members = count_members(desc, &n_unnamed_members); + PyMemberDef *members = NULL; - PyMemberDef *members; - Py_ssize_t n_members, n_unnamed_members; + if ((type->tp_flags & Py_TPFLAGS_READY) == 0) { + assert(type->tp_name == NULL); + assert(type->tp_members == NULL); + assert(type->tp_base == NULL); - members = initialize_members(desc, &n_members, &n_unnamed_members); - if (members == NULL) { - return -1; + members = initialize_members(desc, n_members, n_unnamed_members); + if (members == NULL) { + goto error; + } + initialize_static_fields(type, desc, members, tp_flags); + + _Py_SetImmortal(type); } - initialize_static_fields(type, desc, members, tp_flags); +#ifndef NDEBUG + else { + // Ensure that the type was initialized. + assert(type->tp_name != NULL); + assert(type->tp_members != NULL); + assert(type->tp_base == &PyTuple_Type); + assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); + assert(_Py_IsImmortal(type)); + } +#endif - Py_INCREF(type); // XXX It should be immortal. 
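For contrast with the builtin (static) struct sequence path being reworked above, here is a minimal sketch of the public, heap-type path that also goes through initialize_members() and initialize_structseq_dict(). This is editorial, not part of the patch; the names are illustrative.

/* Sketch only: defining and using a struct sequence via the public API. */
static PyStructSequence_Field point_fields[] = {
    {"x", "x coordinate"},
    {"y", "y coordinate"},
    {NULL, NULL}
};

static PyStructSequence_Desc point_desc = {
    .name = "mymod.Point",
    .doc = "A simple immutable (x, y) pair.",
    .fields = point_fields,
    .n_in_sequence = 2,
};

static PyTypeObject *PointType = NULL;

static int
register_point_type(void)
{
    PointType = PyStructSequence_NewType(&point_desc);
    return (PointType == NULL) ? -1 : 0;
}

static PyObject *
make_point(long x, long y)
{
    PyObject *p = PyStructSequence_New(PointType);
    if (p == NULL) {
        return NULL;
    }
    /* PyStructSequence_SetItem() steals the references. */
    PyStructSequence_SetItem(p, 0, PyLong_FromLong(x));
    PyStructSequence_SetItem(p, 1, PyLong_FromLong(y));
    return p;
}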
- if (_PyStaticType_InitBuiltin(type) < 0) { - PyMem_Free(members); - goto failed_init_builtin; + if (_PyStaticType_InitBuiltin(interp, type) < 0) { + PyErr_Format(PyExc_RuntimeError, + "Can't initialize builtin type %s", + desc->name); + goto error; } if (initialize_structseq_dict( - desc, type->tp_dict, n_members, n_unnamed_members) < 0) { - PyMem_Free(members); - return -1; + desc, _PyType_GetDict(type), n_members, n_unnamed_members) < 0) + { + goto error; } + return 0; -failed_init_builtin: - PyErr_Format(PyExc_RuntimeError, - "Can't initialize builtin type %s", - desc->name); +error: + if (members != NULL) { + PyMem_Free(members); + } return -1; } @@ -566,7 +577,8 @@ PyStructSequence_InitType2(PyTypeObject *type, PyStructSequence_Desc *desc) return -1; } - members = initialize_members(desc, &n_members, &n_unnamed_members); + n_members = count_members(desc, &n_unnamed_members); + members = initialize_members(desc, n_members, n_unnamed_members); if (members == NULL) { return -1; } @@ -585,35 +597,34 @@ PyStructSequence_InitType(PyTypeObject *type, PyStructSequence_Desc *desc) } +/* This is exposed in the internal API, not the public API. + It is only called on builtin static types, which are all + initialized via _PyStructSequence_InitBuiltinWithFlags(). */ + void -_PyStructSequence_FiniType(PyTypeObject *type) +_PyStructSequence_FiniBuiltin(PyInterpreterState *interp, PyTypeObject *type) { // Ensure that the type is initialized assert(type->tp_name != NULL); assert(type->tp_base == &PyTuple_Type); + assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); + assert(_Py_IsImmortal(type)); // Cannot delete a type if it still has subclasses if (_PyType_HasSubclasses(type)) { + // XXX Shouldn't this be an error? return; } - // Undo PyStructSequence_NewType() - type->tp_name = NULL; - PyMem_Free(type->tp_members); + _PyStaticType_Dealloc(interp, type); - _PyStaticType_Dealloc(type); - assert(Py_REFCNT(type) == 1); - // Undo Py_INCREF(type) of _PyStructSequence_InitType(). - // Don't use Py_DECREF(): static type must not be deallocated - Py_SET_REFCNT(type, 0); -#ifdef Py_REF_DEBUG - _Py_DecRefTotal(_PyInterpreterState_GET()); -#endif - - // Make sure that _PyStructSequence_InitType() will initialize - // the type again - assert(Py_REFCNT(type) == 0); - assert(type->tp_name == NULL); + if (_Py_IsMainInterpreter(interp)) { + // Undo _PyStructSequence_InitBuiltinWithFlags(). 
+ type->tp_name = NULL; + PyMem_Free(type->tp_members); + type->tp_members = NULL; + type->tp_base = NULL; + } } @@ -627,7 +638,8 @@ _PyStructSequence_NewType(PyStructSequence_Desc *desc, unsigned long tp_flags) Py_ssize_t n_members, n_unnamed_members; /* Initialize MemberDefs */ - members = initialize_members(desc, &n_members, &n_unnamed_members); + n_members = count_members(desc, &n_unnamed_members); + members = initialize_members(desc, n_members, n_unnamed_members); if (members == NULL) { return NULL; } @@ -658,7 +670,7 @@ _PyStructSequence_NewType(PyStructSequence_Desc *desc, unsigned long tp_flags) } if (initialize_structseq_dict( - desc, type->tp_dict, n_members, n_unnamed_members) < 0) { + desc, _PyType_GetDict(type), n_members, n_unnamed_members) < 0) { Py_DECREF(type); return NULL; } diff --git a/Objects/typeobject.c b/Objects/typeobject.c index e807cc90faa16a..98fac276a873e1 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -6,6 +6,7 @@ #include "pycore_symtable.h" // _Py_Mangle() #include "pycore_dict.h" // _PyDict_KeysSize() #include "pycore_initconfig.h" // _PyStatus_OK() +#include "pycore_memoryobject.h" // _PyMemoryView_FromBufferProc() #include "pycore_moduleobject.h" // _PyModule_GetDef() #include "pycore_object.h" // _PyType_HasFeature() #include "pycore_long.h" // _PyLong_IsNegative() @@ -18,6 +19,7 @@ #include "structmember.h" // PyMemberDef #include +#include // ptrdiff_t /*[clinic input] class type "PyTypeObject *" "&PyType_Type" @@ -54,6 +56,11 @@ typedef struct PySlot_Offset { short slot_offset; } PySlot_Offset; +static void +slot_bf_releasebuffer(PyObject *self, Py_buffer *buffer); + +static void +releasebuffer_call_python(PyObject *self, Py_buffer *buffer); static PyObject * slot_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds); @@ -64,18 +71,28 @@ lookup_maybe_method(PyObject *self, PyObject *attr, int *unbound); static int slot_tp_setattro(PyObject *self, PyObject *name, PyObject *value); -static inline PyTypeObject * subclass_from_ref(PyObject *ref); + +static inline PyTypeObject * +type_from_ref(PyObject *ref) +{ + assert(PyWeakref_CheckRef(ref)); + PyObject *obj = PyWeakref_GET_OBJECT(ref); // borrowed ref + assert(obj != NULL); + if (obj == Py_None) { + return NULL; + } + assert(PyType_Check(obj)); + return _PyType_CAST(obj); +} /* helpers for for static builtin types */ -#ifndef NDEBUG static inline int static_builtin_index_is_set(PyTypeObject *self) { return self->tp_subclasses != NULL; } -#endif static inline size_t static_builtin_index_get(PyTypeObject *self) @@ -107,43 +124,46 @@ static_builtin_state_get(PyInterpreterState *interp, PyTypeObject *self) /* For static types we store some state in an array on each interpreter. */ static_builtin_state * -_PyStaticType_GetState(PyTypeObject *self) +_PyStaticType_GetState(PyInterpreterState *interp, PyTypeObject *self) { assert(self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); - PyInterpreterState *interp = _PyInterpreterState_GET(); return static_builtin_state_get(interp, self); } +/* Set the type's per-interpreter state. */ static void -static_builtin_state_init(PyTypeObject *self) +static_builtin_state_init(PyInterpreterState *interp, PyTypeObject *self) { - /* Set the type's per-interpreter state. 
*/ - PyInterpreterState *interp = _PyInterpreterState_GET(); + if (!static_builtin_index_is_set(self)) { + static_builtin_index_set(self, interp->types.num_builtins_initialized); + } + static_builtin_state *state = static_builtin_state_get(interp, self); /* It should only be called once for each builtin type. */ - assert(!static_builtin_index_is_set(self)); - - static_builtin_index_set(self, interp->types.num_builtins_initialized); - interp->types.num_builtins_initialized++; - - static_builtin_state *state = static_builtin_state_get(interp, self); + assert(state->type == NULL); state->type = self; + /* state->tp_subclasses is left NULL until init_subclasses() sets it. */ /* state->tp_weaklist is left NULL until insert_head() or insert_after() (in weakrefobject.c) sets it. */ + + interp->types.num_builtins_initialized++; } +/* Reset the type's per-interpreter state. + This basically undoes what static_builtin_state_init() did. */ static void -static_builtin_state_clear(PyTypeObject *self) +static_builtin_state_clear(PyInterpreterState *interp, PyTypeObject *self) { - /* Reset the type's per-interpreter state. - This basically undoes what static_builtin_state_init() did. */ - PyInterpreterState *interp = _PyInterpreterState_GET(); - static_builtin_state *state = static_builtin_state_get(interp, self); + + assert(state->type != NULL); state->type = NULL; assert(state->tp_weaklist == NULL); // It was already cleared out. - static_builtin_index_clear(self); + + if (_Py_IsMainInterpreter(interp)) { + static_builtin_index_clear(self); + } assert(interp->types.num_builtins_initialized > 0); interp->types.num_builtins_initialized--; @@ -154,6 +174,284 @@ static_builtin_state_clear(PyTypeObject *self) /* end static builtin helpers */ +static inline void +start_readying(PyTypeObject *type) +{ + if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = static_builtin_state_get(interp, type); + assert(state != NULL); + assert(!state->readying); + state->readying = 1; + return; + } + assert((type->tp_flags & Py_TPFLAGS_READYING) == 0); + type->tp_flags |= Py_TPFLAGS_READYING; +} + +static inline void +stop_readying(PyTypeObject *type) +{ + if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = static_builtin_state_get(interp, type); + assert(state != NULL); + assert(state->readying); + state->readying = 0; + return; + } + assert(type->tp_flags & Py_TPFLAGS_READYING); + type->tp_flags &= ~Py_TPFLAGS_READYING; +} + +static inline int +is_readying(PyTypeObject *type) +{ + if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = static_builtin_state_get(interp, type); + assert(state != NULL); + return state->readying; + } + return (type->tp_flags & Py_TPFLAGS_READYING) != 0; +} + + +/* accessors for objects stored on PyTypeObject */ + +static inline PyObject * +lookup_tp_dict(PyTypeObject *self) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + return state->tp_dict; + } + return self->tp_dict; +} + +PyObject * +_PyType_GetDict(PyTypeObject *self) +{ + /* It returns a borrowed reference. 
*/ + return lookup_tp_dict(self); +} + +static inline void +set_tp_dict(PyTypeObject *self, PyObject *dict) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + state->tp_dict = dict; + return; + } + self->tp_dict = dict; +} + +static inline void +clear_tp_dict(PyTypeObject *self) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + Py_CLEAR(state->tp_dict); + return; + } + Py_CLEAR(self->tp_dict); +} + + +static inline PyObject * +lookup_tp_bases(PyTypeObject *self) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + return state->tp_bases; + } + return self->tp_bases; +} + +PyObject * +_PyType_GetBases(PyTypeObject *self) +{ + /* It returns a borrowed reference. */ + return lookup_tp_bases(self); +} + +static inline void +set_tp_bases(PyTypeObject *self, PyObject *bases) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + state->tp_bases = bases; + return; + } + self->tp_bases = bases; +} + +static inline void +clear_tp_bases(PyTypeObject *self) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + Py_CLEAR(state->tp_bases); + return; + } + Py_CLEAR(self->tp_bases); +} + + +static inline PyObject * +lookup_tp_mro(PyTypeObject *self) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + return state->tp_mro; + } + return self->tp_mro; +} + +PyObject * +_PyType_GetMRO(PyTypeObject *self) +{ + /* It returns a borrowed reference. 
*/ + return lookup_tp_mro(self); +} + +static inline void +set_tp_mro(PyTypeObject *self, PyObject *mro) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + state->tp_mro = mro; + return; + } + self->tp_mro = mro; +} + +static inline void +clear_tp_mro(PyTypeObject *self) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + Py_CLEAR(state->tp_mro); + return; + } + Py_CLEAR(self->tp_mro); +} + + +static PyObject * +init_tp_subclasses(PyTypeObject *self) +{ + PyObject *subclasses = PyDict_New(); + if (subclasses == NULL) { + return NULL; + } + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + state->tp_subclasses = subclasses; + return subclasses; + } + self->tp_subclasses = (void *)subclasses; + return subclasses; +} + +static void +clear_tp_subclasses(PyTypeObject *self) +{ + /* Delete the dictionary to save memory. _PyStaticType_Dealloc() + callers also test if tp_subclasses is NULL to check if a static type + has no subclass. */ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + Py_CLEAR(state->tp_subclasses); + return; + } + Py_CLEAR(self->tp_subclasses); +} + +static inline PyObject * +lookup_tp_subclasses(PyTypeObject *self) +{ + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + static_builtin_state *state = _PyStaticType_GetState(interp, self); + assert(state != NULL); + return state->tp_subclasses; + } + return (PyObject *)self->tp_subclasses; +} + +int +_PyType_HasSubclasses(PyTypeObject *self) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN + // XXX _PyStaticType_GetState() should never return NULL. + && _PyStaticType_GetState(interp, self) == NULL) + { + return 0; + } + if (lookup_tp_subclasses(self) == NULL) { + return 0; + } + return 1; +} + +PyObject* +_PyType_GetSubclasses(PyTypeObject *self) +{ + PyObject *list = PyList_New(0); + if (list == NULL) { + return NULL; + } + + PyObject *subclasses = lookup_tp_subclasses(self); // borrowed ref + if (subclasses == NULL) { + return list; + } + assert(PyDict_CheckExact(subclasses)); + // The loop cannot modify tp_subclasses, there is no need + // to hold a strong reference (use a borrowed reference). + + Py_ssize_t i = 0; + PyObject *ref; // borrowed ref + while (PyDict_Next(subclasses, &i, NULL, &ref)) { + PyTypeObject *subclass = type_from_ref(ref); // borrowed + if (subclass == NULL) { + continue; + } + + if (PyList_Append(list, _PyObject_CAST(subclass)) < 0) { + Py_DECREF(list); + return NULL; + } + } + return list; +} + +/* end accessors for objects stored on PyTypeObject */ + + /* * finds the beginning of the docstring's introspection signature. * if present, returns a pointer pointing to the first '('. 
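The accessor block above routes tp_dict, tp_bases, tp_mro and tp_subclasses through per-interpreter state for static builtin types, so code outside the core should not read type->tp_dict directly. A hedged sketch of the extension-side pattern, assuming the public PyType_GetDict() accessor (which returns a new reference) is available in this development line:

/* Sketch only: reading a type's namespace without touching tp_dict. */
static PyObject *
get_type_attr(PyTypeObject *type, PyObject *name)
{
    PyObject *dict = PyType_GetDict(type);      /* new reference (assumed public API) */
    if (dict == NULL) {
        return NULL;
    }
    PyObject *value = PyDict_GetItemWithError(dict, name);  /* borrowed */
    Py_XINCREF(value);
    Py_DECREF(dict);
    return value;   /* NULL means missing attribute or error; check PyErr_Occurred() */
}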
@@ -223,8 +521,8 @@ _PyType_CheckConsistency(PyTypeObject *type) CHECK(Py_REFCNT(type) >= 1); CHECK(PyType_Check(type)); - CHECK(!(type->tp_flags & Py_TPFLAGS_READYING)); - CHECK(type->tp_dict != NULL); + CHECK(!is_readying(type)); + CHECK(lookup_tp_dict(type) != NULL); if (type->tp_flags & Py_TPFLAGS_HAVE_GC) { // bpo-44263: tp_traverse is required if Py_TPFLAGS_HAVE_GC is set. @@ -234,7 +532,7 @@ _PyType_CheckConsistency(PyTypeObject *type) if (type->tp_flags & Py_TPFLAGS_DISALLOW_INSTANTIATION) { CHECK(type->tp_new == NULL); - CHECK(PyDict_Contains(type->tp_dict, &_Py_ID(__new__)) == 0); + CHECK(PyDict_Contains(lookup_tp_dict(type), &_Py_ID(__new__)) == 0); } return 1; @@ -360,8 +658,6 @@ _PyTypes_Fini(PyInterpreterState *interp) } -static PyObject * lookup_subclasses(PyTypeObject *); - int PyType_AddWatcher(PyType_WatchCallback callback) { @@ -461,14 +757,14 @@ PyType_Modified(PyTypeObject *type) return; } - PyObject *subclasses = lookup_subclasses(type); + PyObject *subclasses = lookup_tp_subclasses(type); if (subclasses != NULL) { assert(PyDict_CheckExact(subclasses)); Py_ssize_t i = 0; PyObject *ref; while (PyDict_Next(subclasses, &i, NULL, &ref)) { - PyTypeObject *subclass = subclass_from_ref(ref); // borrowed + PyTypeObject *subclass = type_from_ref(ref); // borrowed if (subclass == NULL) { continue; } @@ -597,7 +893,7 @@ assign_version_tag(PyInterpreterState *interp, PyTypeObject *type) assert (type->tp_version_tag != 0); } - PyObject *bases = type->tp_bases; + PyObject *bases = lookup_tp_bases(type); Py_ssize_t n = PyTuple_GET_SIZE(bases); for (Py_ssize_t i = 0; i < n; i++) { PyObject *b = PyTuple_GET_ITEM(bases, i); @@ -626,7 +922,6 @@ static PyMemberDef type_members[] = { {"__base__", T_OBJECT, offsetof(PyTypeObject, tp_base), READONLY}, {"__dictoffset__", T_PYSSIZET, offsetof(PyTypeObject, tp_dictoffset), READONLY}, - {"__mro__", T_OBJECT, offsetof(PyTypeObject, tp_mro), READONLY}, {0} }; @@ -748,7 +1043,8 @@ type_module(PyTypeObject *type, void *context) PyObject *mod; if (type->tp_flags & Py_TPFLAGS_HEAPTYPE) { - mod = PyDict_GetItemWithError(type->tp_dict, &_Py_ID(__module__)); + PyObject *dict = lookup_tp_dict(type); + mod = PyDict_GetItemWithError(dict, &_Py_ID(__module__)); if (mod == NULL) { if (!PyErr_Occurred()) { PyErr_Format(PyExc_AttributeError, "__module__"); @@ -780,7 +1076,8 @@ type_set_module(PyTypeObject *type, PyObject *value, void *context) PyType_Modified(type); - return PyDict_SetItem(type->tp_dict, &_Py_ID(__module__), value); + PyObject *dict = lookup_tp_dict(type); + return PyDict_SetItem(dict, &_Py_ID(__module__), value); } static PyObject * @@ -789,9 +1086,10 @@ type_abstractmethods(PyTypeObject *type, void *context) PyObject *mod = NULL; /* type itself has an __abstractmethods__ descriptor (this). Don't return that. */ - if (type != &PyType_Type) - mod = PyDict_GetItemWithError(type->tp_dict, - &_Py_ID(__abstractmethods__)); + if (type != &PyType_Type) { + PyObject *dict = lookup_tp_dict(type); + mod = PyDict_GetItemWithError(dict, &_Py_ID(__abstractmethods__)); + } if (!mod) { if (!PyErr_Occurred()) { PyErr_SetObject(PyExc_AttributeError, &_Py_ID(__abstractmethods__)); @@ -809,15 +1107,16 @@ type_set_abstractmethods(PyTypeObject *type, PyObject *value, void *context) special to update subclasses. 
*/ int abstract, res; + PyObject *dict = lookup_tp_dict(type); if (value != NULL) { abstract = PyObject_IsTrue(value); if (abstract < 0) return -1; - res = PyDict_SetItem(type->tp_dict, &_Py_ID(__abstractmethods__), value); + res = PyDict_SetItem(dict, &_Py_ID(__abstractmethods__), value); } else { abstract = 0; - res = PyDict_DelItem(type->tp_dict, &_Py_ID(__abstractmethods__)); + res = PyDict_DelItem(dict, &_Py_ID(__abstractmethods__)); if (res && PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_SetObject(PyExc_AttributeError, &_Py_ID(__abstractmethods__)); return -1; @@ -836,7 +1135,21 @@ type_set_abstractmethods(PyTypeObject *type, PyObject *value, void *context) static PyObject * type_get_bases(PyTypeObject *type, void *context) { - return Py_NewRef(type->tp_bases); + PyObject *bases = lookup_tp_bases(type); + if (bases == NULL) { + Py_RETURN_NONE; + } + return Py_NewRef(bases); +} + +static PyObject * +type_get_mro(PyTypeObject *type, void *context) +{ + PyObject *mro = lookup_tp_mro(type); + if (mro == NULL) { + Py_RETURN_NONE; + } + return Py_NewRef(mro); } static PyTypeObject *best_base(PyObject *); @@ -864,7 +1177,7 @@ mro_hierarchy(PyTypeObject *type, PyObject *temp) /* error / reentrance */ return res; } - PyObject *new_mro = type->tp_mro; + PyObject *new_mro = lookup_tp_mro(type); PyObject *tuple; if (old_mro != NULL) { @@ -883,7 +1196,7 @@ mro_hierarchy(PyTypeObject *type, PyObject *temp) Py_XDECREF(tuple); if (res < 0) { - type->tp_mro = old_mro; + set_tp_mro(type, old_mro); Py_DECREF(new_mro); return -1; } @@ -962,7 +1275,8 @@ type_set_bases(PyTypeObject *type, PyObject *new_bases, void *context) below), which in turn may cause an inheritance cycle through tp_base chain. And this is definitely not what you want to ever happen. */ - (base->tp_mro != NULL && type_is_subtype_base_chain(base, type))) + (lookup_tp_mro(base) != NULL + && type_is_subtype_base_chain(base, type))) { PyErr_SetString(PyExc_TypeError, "a __bases__ item causes an inheritance cycle"); @@ -979,11 +1293,11 @@ type_set_bases(PyTypeObject *type, PyObject *new_bases, void *context) return -1; } - PyObject *old_bases = type->tp_bases; + PyObject *old_bases = lookup_tp_bases(type); assert(old_bases != NULL); PyTypeObject *old_base = type->tp_base; - type->tp_bases = Py_NewRef(new_bases); + set_tp_bases(type, Py_NewRef(new_bases)); type->tp_base = (PyTypeObject *)Py_NewRef(new_base); PyObject *temp = PyList_New(0); @@ -998,7 +1312,7 @@ type_set_bases(PyTypeObject *type, PyObject *new_bases, void *context) /* Take no action in case if type->tp_bases has been replaced through reentrance. */ int res; - if (type->tp_bases == new_bases) { + if (lookup_tp_bases(type) == new_bases) { /* any base that was in __bases__ but now isn't, we need to remove |type| from its tp_subclasses. conversely, any class now in __bases__ that wasn't @@ -1029,18 +1343,18 @@ type_set_bases(PyTypeObject *type, PyObject *new_bases, void *context) PyArg_UnpackTuple(PyList_GET_ITEM(temp, i), "", 2, 3, &cls, &new_mro, &old_mro); /* Do not rollback if cls has a newer version of MRO. 
*/ - if (cls->tp_mro == new_mro) { - cls->tp_mro = Py_XNewRef(old_mro); + if (lookup_tp_mro(cls) == new_mro) { + set_tp_mro(cls, Py_XNewRef(old_mro)); Py_DECREF(new_mro); } } Py_DECREF(temp); bail: - if (type->tp_bases == new_bases) { + if (lookup_tp_bases(type) == new_bases) { assert(type->tp_base == new_base); - type->tp_bases = old_bases; + set_tp_bases(type, old_bases); type->tp_base = old_base; Py_DECREF(new_bases); @@ -1058,10 +1372,11 @@ type_set_bases(PyTypeObject *type, PyObject *new_bases, void *context) static PyObject * type_dict(PyTypeObject *type, void *context) { - if (type->tp_dict == NULL) { + PyObject *dict = lookup_tp_dict(type); + if (dict == NULL) { Py_RETURN_NONE; } - return PyDictProxy_New(type->tp_dict); + return PyDictProxy_New(dict); } static PyObject * @@ -1071,7 +1386,8 @@ type_get_doc(PyTypeObject *type, void *context) if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE) && type->tp_doc != NULL) { return _PyType_GetDocFromInternalDoc(type->tp_name, type->tp_doc); } - result = PyDict_GetItemWithError(type->tp_dict, &_Py_ID(__doc__)); + PyObject *dict = lookup_tp_dict(type); + result = PyDict_GetItemWithError(dict, &_Py_ID(__doc__)); if (result == NULL) { if (!PyErr_Occurred()) { result = Py_NewRef(Py_None); @@ -1099,7 +1415,8 @@ type_set_doc(PyTypeObject *type, PyObject *value, void *context) if (!check_set_special_type_attr(type, value, "__doc__")) return -1; PyType_Modified(type); - return PyDict_SetItem(type->tp_dict, &_Py_ID(__doc__), value); + PyObject *dict = lookup_tp_dict(type); + return PyDict_SetItem(dict, &_Py_ID(__doc__), value); } static PyObject * @@ -1112,9 +1429,9 @@ type_get_annotations(PyTypeObject *type, void *context) PyObject *annotations; /* there's no _PyDict_GetItemId without WithError, so let's LBYL. */ - if (PyDict_Contains(type->tp_dict, &_Py_ID(__annotations__))) { - annotations = PyDict_GetItemWithError( - type->tp_dict, &_Py_ID(__annotations__)); + PyObject *dict = lookup_tp_dict(type); + if (PyDict_Contains(dict, &_Py_ID(__annotations__))) { + annotations = PyDict_GetItemWithError(dict, &_Py_ID(__annotations__)); /* ** PyDict_GetItemWithError could still fail, ** for instance with a well-timed Ctrl-C or a MemoryError. 
@@ -1132,7 +1449,7 @@ type_get_annotations(PyTypeObject *type, void *context) annotations = PyDict_New(); if (annotations) { int result = PyDict_SetItem( - type->tp_dict, &_Py_ID(__annotations__), annotations); + dict, &_Py_ID(__annotations__), annotations); if (result) { Py_CLEAR(annotations); } else { @@ -1154,16 +1471,17 @@ type_set_annotations(PyTypeObject *type, PyObject *value, void *context) } int result; + PyObject *dict = lookup_tp_dict(type); if (value != NULL) { /* set */ - result = PyDict_SetItem(type->tp_dict, &_Py_ID(__annotations__), value); + result = PyDict_SetItem(dict, &_Py_ID(__annotations__), value); } else { /* delete */ - if (!PyDict_Contains(type->tp_dict, &_Py_ID(__annotations__))) { + if (!PyDict_Contains(dict, &_Py_ID(__annotations__))) { PyErr_Format(PyExc_AttributeError, "__annotations__"); return -1; } - result = PyDict_DelItem(type->tp_dict, &_Py_ID(__annotations__)); + result = PyDict_DelItem(dict, &_Py_ID(__annotations__)); } if (result == 0) { @@ -1210,6 +1528,7 @@ static PyGetSetDef type_getsets[] = { {"__name__", (getter)type_name, (setter)type_set_name, NULL}, {"__qualname__", (getter)type_qualname, (setter)type_set_qualname, NULL}, {"__bases__", (getter)type_get_bases, (setter)type_set_bases, NULL}, + {"__mro__", (getter)type_get_mro, NULL, NULL}, {"__module__", (getter)type_module, (setter)type_set_module, NULL}, {"__abstractmethods__", (getter)type_abstractmethods, (setter)type_set_abstractmethods, NULL}, @@ -1374,6 +1693,12 @@ PyType_GenericNew(PyTypeObject *type, PyObject *args, PyObject *kwds) /* Helpers for subtyping */ +static inline PyMemberDef * +_PyHeapType_GET_MEMBERS(PyHeapTypeObject* type) +{ + return PyObject_GetItemData((PyObject *)type); +} + static int traverse_slots(PyTypeObject *type, PyObject *self, visitproc visit, void *arg) { @@ -1750,7 +2075,7 @@ PyType_IsSubtype(PyTypeObject *a, PyTypeObject *b) { PyObject *mro; - mro = a->tp_mro; + mro = lookup_tp_mro(a); if (mro != NULL) { /* Deal with multiple inheritance without recursion by walking the MRO tuple */ @@ -2134,17 +2459,17 @@ mro_implementation(PyTypeObject *type) return NULL; } - PyObject *bases = type->tp_bases; + PyObject *bases = lookup_tp_bases(type); Py_ssize_t n = PyTuple_GET_SIZE(bases); for (Py_ssize_t i = 0; i < n; i++) { PyTypeObject *base = _PyType_CAST(PyTuple_GET_ITEM(bases, i)); - if (base->tp_mro == NULL) { + if (lookup_tp_mro(base) == NULL) { PyErr_Format(PyExc_TypeError, "Cannot extend an incomplete type '%.100s'", base->tp_name); return NULL; } - assert(PyTuple_Check(base->tp_mro)); + assert(PyTuple_Check(lookup_tp_mro(base))); } if (n == 1) { @@ -2152,7 +2477,8 @@ mro_implementation(PyTypeObject *type) * is trivial. 
*/ PyTypeObject *base = _PyType_CAST(PyTuple_GET_ITEM(bases, 0)); - Py_ssize_t k = PyTuple_GET_SIZE(base->tp_mro); + PyObject *base_mro = lookup_tp_mro(base); + Py_ssize_t k = PyTuple_GET_SIZE(base_mro); PyObject *result = PyTuple_New(k + 1); if (result == NULL) { return NULL; @@ -2161,7 +2487,7 @@ mro_implementation(PyTypeObject *type) ; PyTuple_SET_ITEM(result, 0, Py_NewRef(type)); for (Py_ssize_t i = 0; i < k; i++) { - PyObject *cls = PyTuple_GET_ITEM(base->tp_mro, i); + PyObject *cls = PyTuple_GET_ITEM(base_mro, i); PyTuple_SET_ITEM(result, i + 1, Py_NewRef(cls)); } return result; @@ -2188,7 +2514,7 @@ mro_implementation(PyTypeObject *type) for (Py_ssize_t i = 0; i < n; i++) { PyTypeObject *base = _PyType_CAST(PyTuple_GET_ITEM(bases, i)); - to_merge[i] = base->tp_mro; + to_merge[i] = lookup_tp_mro(base); } to_merge[n] = bases; @@ -2343,9 +2669,9 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro) /* Keep a reference to be able to do a reentrancy check below. Don't let old_mro be GC'ed and its address be reused for another object, like (suddenly!) a new tp_mro. */ - old_mro = Py_XNewRef(type->tp_mro); + old_mro = Py_XNewRef(lookup_tp_mro(type)); new_mro = mro_invoke(type); /* might cause reentrance */ - reent = (type->tp_mro != old_mro); + reent = (lookup_tp_mro(type) != old_mro); Py_XDECREF(old_mro); if (new_mro == NULL) { return -1; @@ -2356,12 +2682,12 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro) return 0; } - type->tp_mro = new_mro; + set_tp_mro(type, new_mro); - type_mro_modified(type, type->tp_mro); + type_mro_modified(type, new_mro); /* corner case: the super class might have been hidden from the custom MRO */ - type_mro_modified(type, type->tp_bases); + type_mro_modified(type, lookup_tp_bases(type)); // XXX Expand this to Py_TPFLAGS_IMMUTABLETYPE? 
if (!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)) { @@ -2963,7 +3289,7 @@ type_new_alloc(type_new_ctx *ctx) type->tp_as_mapping = &et->as_mapping; type->tp_as_buffer = &et->as_buffer; - type->tp_bases = Py_NewRef(ctx->bases); + set_tp_bases(type, Py_NewRef(ctx->bases)); type->tp_base = (PyTypeObject *)Py_NewRef(ctx->base); type->tp_dealloc = subtype_dealloc; @@ -3003,7 +3329,8 @@ type_new_set_name(const type_new_ctx *ctx, PyTypeObject *type) static int type_new_set_module(PyTypeObject *type) { - int r = PyDict_Contains(type->tp_dict, &_Py_ID(__module__)); + PyObject *dict = lookup_tp_dict(type); + int r = PyDict_Contains(dict, &_Py_ID(__module__)); if (r < 0) { return -1; } @@ -3024,7 +3351,7 @@ type_new_set_module(PyTypeObject *type) return 0; } - if (PyDict_SetItem(type->tp_dict, &_Py_ID(__module__), module) < 0) { + if (PyDict_SetItem(dict, &_Py_ID(__module__), module) < 0) { return -1; } return 0; @@ -3037,8 +3364,8 @@ static int type_new_set_ht_name(PyTypeObject *type) { PyHeapTypeObject *et = (PyHeapTypeObject *)type; - PyObject *qualname = PyDict_GetItemWithError( - type->tp_dict, &_Py_ID(__qualname__)); + PyObject *dict = lookup_tp_dict(type); + PyObject *qualname = PyDict_GetItemWithError(dict, &_Py_ID(__qualname__)); if (qualname != NULL) { if (!PyUnicode_Check(qualname)) { PyErr_Format(PyExc_TypeError, @@ -3047,7 +3374,7 @@ type_new_set_ht_name(PyTypeObject *type) return -1; } et->ht_qualname = Py_NewRef(qualname); - if (PyDict_DelItem(type->tp_dict, &_Py_ID(__qualname__)) < 0) { + if (PyDict_DelItem(dict, &_Py_ID(__qualname__)) < 0) { return -1; } } @@ -3067,7 +3394,8 @@ type_new_set_ht_name(PyTypeObject *type) static int type_new_set_doc(PyTypeObject *type) { - PyObject *doc = PyDict_GetItemWithError(type->tp_dict, &_Py_ID(__doc__)); + PyObject *dict = lookup_tp_dict(type); + PyObject *doc = PyDict_GetItemWithError(dict, &_Py_ID(__doc__)); if (doc == NULL) { if (PyErr_Occurred()) { return -1; @@ -3102,7 +3430,8 @@ type_new_set_doc(PyTypeObject *type) static int type_new_staticmethod(PyTypeObject *type, PyObject *attr) { - PyObject *func = PyDict_GetItemWithError(type->tp_dict, attr); + PyObject *dict = lookup_tp_dict(type); + PyObject *func = PyDict_GetItemWithError(dict, attr); if (func == NULL) { if (PyErr_Occurred()) { return -1; @@ -3117,7 +3446,7 @@ type_new_staticmethod(PyTypeObject *type, PyObject *attr) if (static_func == NULL) { return -1; } - if (PyDict_SetItem(type->tp_dict, attr, static_func) < 0) { + if (PyDict_SetItem(dict, attr, static_func) < 0) { Py_DECREF(static_func); return -1; } @@ -3129,7 +3458,8 @@ type_new_staticmethod(PyTypeObject *type, PyObject *attr) static int type_new_classmethod(PyTypeObject *type, PyObject *attr) { - PyObject *func = PyDict_GetItemWithError(type->tp_dict, attr); + PyObject *dict = lookup_tp_dict(type); + PyObject *func = PyDict_GetItemWithError(dict, attr); if (func == NULL) { if (PyErr_Occurred()) { return -1; @@ -3145,7 +3475,7 @@ type_new_classmethod(PyTypeObject *type, PyObject *attr) return -1; } - if (PyDict_SetItem(type->tp_dict, attr, method) < 0) { + if (PyDict_SetItem(dict, attr, method) < 0) { Py_DECREF(method); return -1; } @@ -3231,8 +3561,8 @@ type_new_set_slots(const type_new_ctx *ctx, PyTypeObject *type) static int type_new_set_classcell(PyTypeObject *type) { - PyObject *cell = PyDict_GetItemWithError( - type->tp_dict, &_Py_ID(__classcell__)); + PyObject *dict = lookup_tp_dict(type); + PyObject *cell = PyDict_GetItemWithError(dict, &_Py_ID(__classcell__)); if (cell == NULL) { if (PyErr_Occurred()) { return -1; 
@@ -3249,7 +3579,7 @@ type_new_set_classcell(PyTypeObject *type) } (void)PyCell_Set(cell, (PyObject *) type); - if (PyDict_DelItem(type->tp_dict, &_Py_ID(__classcell__)) < 0) { + if (PyDict_DelItem(dict, &_Py_ID(__classcell__)) < 0) { return -1; } return 0; @@ -3356,7 +3686,7 @@ type_new_init(type_new_ctx *ctx) goto error; } - type->tp_dict = dict; + set_tp_dict(type, dict); PyHeapTypeObject *et = (PyHeapTypeObject*)type; et->ht_slots = ctx->slots; @@ -3556,6 +3886,15 @@ static const PySlot_Offset pyslot_offsets[] = { #include "typeslots.inc" }; +/* Align up to the nearest multiple of alignof(max_align_t) + * (like _Py_ALIGN_UP, but for a size rather than pointer) + */ +static Py_ssize_t +_align_up(Py_ssize_t size) +{ + return (size + ALIGNOF_MAX_ALIGN_T - 1) & ~(ALIGNOF_MAX_ALIGN_T - 1); +} + /* Given a PyType_FromMetaclass `bases` argument (NULL, type, or tuple of * types), return a tuple of types. */ @@ -3633,9 +3972,10 @@ check_basicsize_includes_size_and_offsets(PyTypeObject* type) return 1; } -PyObject * -PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, - PyType_Spec *spec, PyObject *bases_in) +static PyObject * +_PyType_FromMetaclass_impl( + PyTypeObject *metaclass, PyObject *module, + PyType_Spec *spec, PyObject *bases_in, int _allow_tp_new) { /* Invariant: A non-NULL value in one of these means this function holds * a strong reference or owns allocated memory. @@ -3695,6 +4035,20 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, assert(memb->flags == READONLY); vectorcalloffset = memb->offset; } + if (memb->flags & Py_RELATIVE_OFFSET) { + if (spec->basicsize > 0) { + PyErr_SetString( + PyExc_SystemError, + "With Py_RELATIVE_OFFSET, basicsize must be negative."); + goto finally; + } + if (memb->offset < 0 || memb->offset >= -spec->basicsize) { + PyErr_SetString( + PyExc_SystemError, + "Member offset out of range (0..-basicsize)"); + goto finally; + } + } } break; case Py_tp_doc: @@ -3810,9 +4164,21 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, goto finally; } if (metaclass->tp_new != PyType_Type.tp_new) { - PyErr_SetString(PyExc_TypeError, - "Metaclasses with custom tp_new are not supported."); - goto finally; + if (_allow_tp_new) { + if (PyErr_WarnFormat( + PyExc_DeprecationWarning, 1, + "Using PyType_Spec with metaclasses that have custom " + "tp_new is deprecated and will no longer be allowed in " + "Python 3.14.") < 0) { + goto finally; + } + } + else { + PyErr_SetString( + PyExc_TypeError, + "Metaclasses with custom tp_new are not supported."); + goto finally; + } } /* Calculate best base, and check that all bases are type objects */ @@ -3824,6 +4190,32 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, // here we just check its work assert(_PyType_HasFeature(base, Py_TPFLAGS_BASETYPE)); + /* Calculate sizes */ + + Py_ssize_t basicsize = spec->basicsize; + Py_ssize_t type_data_offset = spec->basicsize; + if (basicsize == 0) { + /* Inherit */ + basicsize = base->tp_basicsize; + } + else if (basicsize < 0) { + /* Extend */ + type_data_offset = _align_up(base->tp_basicsize); + basicsize = type_data_offset + _align_up(-spec->basicsize); + + /* Inheriting variable-sized types is limited */ + if (base->tp_itemsize + && !((base->tp_flags | spec->flags) & Py_TPFLAGS_ITEMS_AT_END)) + { + PyErr_SetString( + PyExc_SystemError, + "Cannot extend variable-size class without Py_TPFLAGS_ITEMS_AT_END."); + goto finally; + } + } + + Py_ssize_t itemsize = spec->itemsize; + /* Allocate the new type * * Between here and 
PyType_Ready, we should limit: @@ -3855,7 +4247,7 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, /* Set slots we have prepared */ type->tp_base = (PyTypeObject *)Py_NewRef(base); - type->tp_bases = bases; + set_tp_bases(type, bases); bases = NULL; // We give our reference to bases to the type type->tp_doc = tp_doc; @@ -3871,8 +4263,8 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, /* Copy the sizes */ - type->tp_basicsize = spec->basicsize; - type->tp_itemsize = spec->itemsize; + type->tp_basicsize = basicsize; + type->tp_itemsize = itemsize; /* Copy all the ordinary slots */ @@ -3889,6 +4281,16 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, size_t len = Py_TYPE(type)->tp_itemsize * nmembers; memcpy(_PyHeapType_GET_MEMBERS(res), slot->pfunc, len); type->tp_members = _PyHeapType_GET_MEMBERS(res); + PyMemberDef *memb; + Py_ssize_t i; + for (memb = _PyHeapType_GET_MEMBERS(res), i = nmembers; + i > 0; ++memb, --i) + { + if (memb->flags & Py_RELATIVE_OFFSET) { + memb->flags &= ~Py_RELATIVE_OFFSET; + memb->offset += type_data_offset; + } + } } break; default: @@ -3897,6 +4299,7 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, PySlot_Offset slotoffsets = pyslot_offsets[slot->slot]; short slot_offset = slotoffsets.slot_offset; if (slotoffsets.subslot_offset == -1) { + /* Set a slot in the main PyTypeObject */ *(void**)((char*)res_start + slot_offset) = slot->pfunc; } else { @@ -3935,12 +4338,13 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, goto finally; } + PyObject *dict = lookup_tp_dict(type); if (type->tp_doc) { PyObject *__doc__ = PyUnicode_FromString(_PyType_DocWithoutSignature(type->tp_name, type->tp_doc)); if (!__doc__) { goto finally; } - r = PyDict_SetItem(type->tp_dict, &_Py_ID(__doc__), __doc__); + r = PyDict_SetItem(dict, &_Py_ID(__doc__), __doc__); Py_DECREF(__doc__); if (r < 0) { goto finally; @@ -3948,18 +4352,18 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, } if (weaklistoffset) { - if (PyDict_DelItem((PyObject *)type->tp_dict, &_Py_ID(__weaklistoffset__)) < 0) { + if (PyDict_DelItem(dict, &_Py_ID(__weaklistoffset__)) < 0) { goto finally; } } if (dictoffset) { - if (PyDict_DelItem((PyObject *)type->tp_dict, &_Py_ID(__dictoffset__)) < 0) { + if (PyDict_DelItem(dict, &_Py_ID(__dictoffset__)) < 0) { goto finally; } } /* Set type.__module__ */ - r = PyDict_Contains(type->tp_dict, &_Py_ID(__module__)); + r = PyDict_Contains(dict, &_Py_ID(__module__)); if (r < 0) { goto finally; } @@ -3971,7 +4375,7 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, if (modname == NULL) { goto finally; } - r = PyDict_SetItem(type->tp_dict, &_Py_ID(__module__), modname); + r = PyDict_SetItem(dict, &_Py_ID(__module__), modname); Py_DECREF(modname); if (r != 0) { goto finally; @@ -3998,22 +4402,29 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, return (PyObject*)res; } +PyObject * +PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, + PyType_Spec *spec, PyObject *bases_in) +{ + return _PyType_FromMetaclass_impl(metaclass, module, spec, bases_in, 0); +} + PyObject * PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { - return PyType_FromMetaclass(NULL, module, spec, bases); + return _PyType_FromMetaclass_impl(NULL, module, spec, bases, 1); } PyObject * PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) { - return PyType_FromMetaclass(NULL, NULL, spec, bases); + return _PyType_FromMetaclass_impl(NULL, NULL, 
spec, bases, 1); } PyObject * PyType_FromSpec(PyType_Spec *spec) { - return PyType_FromMetaclass(NULL, NULL, spec, NULL); + return _PyType_FromMetaclass_impl(NULL, NULL, spec, NULL, 1); } PyObject * @@ -4093,7 +4504,7 @@ PyType_GetModuleByDef(PyTypeObject *type, PyModuleDef *def) { assert(PyType_Check(type)); - PyObject *mro = type->tp_mro; + PyObject *mro = lookup_tp_mro(type); // The type must be ready assert(mro != NULL); assert(PyTuple_Check(mro)); @@ -4123,6 +4534,34 @@ PyType_GetModuleByDef(PyTypeObject *type, PyModuleDef *def) return NULL; } +void * +PyObject_GetTypeData(PyObject *obj, PyTypeObject *cls) +{ + assert(PyObject_TypeCheck(obj, cls)); + return (char *)obj + _align_up(cls->tp_base->tp_basicsize); +} + +Py_ssize_t +PyType_GetTypeDataSize(PyTypeObject *cls) +{ + ptrdiff_t result = cls->tp_basicsize - _align_up(cls->tp_base->tp_basicsize); + if (result < 0) { + return 0; + } + return result; +} + +void * +PyObject_GetItemData(PyObject *obj) +{ + if (!PyType_HasFeature(Py_TYPE(obj), Py_TPFLAGS_ITEMS_AT_END)) { + PyErr_Format(PyExc_TypeError, + "type '%s' does not have Py_TPFLAGS_ITEMS_AT_END", + Py_TYPE(obj)->tp_name); + return NULL; + } + return (char *)obj + Py_TYPE(obj)->tp_basicsize; +} /* Internal API to look for a name through the MRO, bypassing the method cache. This returns a borrowed reference, and might set an exception. @@ -4142,14 +4581,14 @@ find_name_in_mro(PyTypeObject *type, PyObject *name, int *error) } /* Look in tp_dict of types in MRO */ - PyObject *mro = type->tp_mro; + PyObject *mro = lookup_tp_mro(type); if (mro == NULL) { - if ((type->tp_flags & Py_TPFLAGS_READYING) == 0) { + if (!is_readying(type)) { if (PyType_Ready(type) < 0) { *error = -1; return NULL; } - mro = type->tp_mro; + mro = lookup_tp_mro(type); } if (mro == NULL) { *error = 1; @@ -4164,7 +4603,7 @@ find_name_in_mro(PyTypeObject *type, PyObject *name, int *error) Py_ssize_t n = PyTuple_GET_SIZE(mro); for (Py_ssize_t i = 0; i < n; i++) { PyObject *base = PyTuple_GET_ITEM(mro, i); - PyObject *dict = _PyType_CAST(base)->tp_dict; + PyObject *dict = lookup_tp_dict(_PyType_CAST(base)); assert(dict && PyDict_Check(dict)); res = _PyDict_GetItem_KnownHash(dict, name, hash); if (res != NULL) { @@ -4438,20 +4877,19 @@ _PyDictKeys_DecRef(PyDictKeysObject *keys); static void type_dealloc_common(PyTypeObject *type) { - if (type->tp_bases != NULL) { + PyObject *bases = lookup_tp_bases(type); + if (bases != NULL) { PyObject *exc = PyErr_GetRaisedException(); - remove_all_subclasses(type, type->tp_bases); + remove_all_subclasses(type, bases); PyErr_SetRaisedException(exc); } } -static void clear_subclasses(PyTypeObject *self); - static void clear_static_tp_subclasses(PyTypeObject *type) { - PyObject *subclasses = lookup_subclasses(type); + PyObject *subclasses = lookup_tp_subclasses(type); if (subclasses == NULL) { return; } @@ -4480,7 +4918,7 @@ clear_static_tp_subclasses(PyTypeObject *type) Py_ssize_t i = 0; PyObject *key, *ref; // borrowed ref while (PyDict_Next(subclasses, &i, &key, &ref)) { - PyTypeObject *subclass = subclass_from_ref(ref); // borrowed + PyTypeObject *subclass = type_from_ref(ref); // borrowed if (subclass == NULL) { continue; } @@ -4488,36 +4926,40 @@ clear_static_tp_subclasses(PyTypeObject *type) assert(!(subclass->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)); } - clear_subclasses(type); + clear_tp_subclasses(type); +} + +static void +clear_static_type_objects(PyInterpreterState *interp, PyTypeObject *type) +{ + if (_Py_IsMainInterpreter(interp)) { + Py_CLEAR(type->tp_cache); + } + 
clear_tp_dict(type); + clear_tp_bases(type); + clear_tp_mro(type); + clear_static_tp_subclasses(type); } void -_PyStaticType_Dealloc(PyTypeObject *type) +_PyStaticType_Dealloc(PyInterpreterState *interp, PyTypeObject *type) { - assert(!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)); + assert(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); + assert(_Py_IsImmortal((PyObject *)type)); type_dealloc_common(type); - Py_CLEAR(type->tp_dict); - Py_CLEAR(type->tp_bases); - Py_CLEAR(type->tp_mro); - Py_CLEAR(type->tp_cache); - clear_static_tp_subclasses(type); + clear_static_type_objects(interp, type); - // PyObject_ClearWeakRefs() raises an exception if Py_REFCNT() != 0 - if (Py_REFCNT(type) == 0) { - PyObject_ClearWeakRefs((PyObject *)type); + if (_Py_IsMainInterpreter(interp)) { + type->tp_flags &= ~Py_TPFLAGS_READY; + type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG; + type->tp_version_tag = 0; } - type->tp_flags &= ~Py_TPFLAGS_READY; - type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG; - type->tp_version_tag = 0; - - if (type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { - _PyStaticType_ClearWeakRefs(type); - static_builtin_state_clear(type); - /* We leave _Py_TPFLAGS_STATIC_BUILTIN set on tp_flags. */ - } + _PyStaticType_ClearWeakRefs(interp, type); + static_builtin_state_clear(interp, type); + /* We leave _Py_TPFLAGS_STATIC_BUILTIN set on tp_flags. */ } @@ -4529,91 +4971,34 @@ type_dealloc(PyTypeObject *type) _PyObject_GC_UNTRACK(type); - type_dealloc_common(type); - - // PyObject_ClearWeakRefs() raises an exception if Py_REFCNT() != 0 - assert(Py_REFCNT(type) == 0); - PyObject_ClearWeakRefs((PyObject *)type); - - Py_XDECREF(type->tp_base); - Py_XDECREF(type->tp_dict); - Py_XDECREF(type->tp_bases); - Py_XDECREF(type->tp_mro); - Py_XDECREF(type->tp_cache); - clear_subclasses(type); - - /* A type's tp_doc is heap allocated, unlike the tp_doc slots - * of most other objects. It's okay to cast it to char *. - */ - PyObject_Free((char *)type->tp_doc); - - PyHeapTypeObject *et = (PyHeapTypeObject *)type; - Py_XDECREF(et->ht_name); - Py_XDECREF(et->ht_qualname); - Py_XDECREF(et->ht_slots); - if (et->ht_cached_keys) { - _PyDictKeys_DecRef(et->ht_cached_keys); - } - Py_XDECREF(et->ht_module); - PyMem_Free(et->_ht_tpname); - Py_TYPE(type)->tp_free((PyObject *)type); -} - - -static PyObject * -lookup_subclasses(PyTypeObject *self) -{ - if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { - static_builtin_state *state = _PyStaticType_GetState(self); - assert(state != NULL); - return state->tp_subclasses; - } - return (PyObject *)self->tp_subclasses; -} - -int -_PyType_HasSubclasses(PyTypeObject *self) -{ - if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN && - _PyStaticType_GetState(self) == NULL) { - return 0; - } - if (lookup_subclasses(self) == NULL) { - return 0; - } - return 1; -} - -PyObject* -_PyType_GetSubclasses(PyTypeObject *self) -{ - PyObject *list = PyList_New(0); - if (list == NULL) { - return NULL; - } - - PyObject *subclasses = lookup_subclasses(self); // borrowed ref - if (subclasses == NULL) { - return list; - } - assert(PyDict_CheckExact(subclasses)); - // The loop cannot modify tp_subclasses, there is no need - // to hold a strong reference (use a borrowed reference). 
+ type_dealloc_common(type); - Py_ssize_t i = 0; - PyObject *ref; // borrowed ref - while (PyDict_Next(subclasses, &i, NULL, &ref)) { - PyTypeObject *subclass = subclass_from_ref(ref); // borrowed - if (subclass == NULL) { - continue; - } + // PyObject_ClearWeakRefs() raises an exception if Py_REFCNT() != 0 + assert(Py_REFCNT(type) == 0); + PyObject_ClearWeakRefs((PyObject *)type); - if (PyList_Append(list, _PyObject_CAST(subclass)) < 0) { - Py_DECREF(list); - return NULL; - } + Py_XDECREF(type->tp_base); + Py_XDECREF(type->tp_dict); + Py_XDECREF(type->tp_bases); + Py_XDECREF(type->tp_mro); + Py_XDECREF(type->tp_cache); + clear_tp_subclasses(type); + + /* A type's tp_doc is heap allocated, unlike the tp_doc slots + * of most other objects. It's okay to cast it to char *. + */ + PyObject_Free((char *)type->tp_doc); + + PyHeapTypeObject *et = (PyHeapTypeObject *)type; + Py_XDECREF(et->ht_name); + Py_XDECREF(et->ht_qualname); + Py_XDECREF(et->ht_slots); + if (et->ht_cached_keys) { + _PyDictKeys_DecRef(et->ht_cached_keys); } - return list; + Py_XDECREF(et->ht_module); + PyMem_Free(et->_ht_tpname); + Py_TYPE(type)->tp_free((PyObject *)type); } @@ -4830,8 +5215,9 @@ type_clear(PyTypeObject *type) */ PyType_Modified(type); - if (type->tp_dict) { - PyDict_Clear(type->tp_dict); + PyObject *dict = lookup_tp_dict(type); + if (dict) { + PyDict_Clear(dict); } Py_CLEAR(((PyHeapTypeObject *)type)->ht_module); @@ -4873,7 +5259,8 @@ PyTypeObject PyType_Type = { 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TYPE_SUBCLASS | - Py_TPFLAGS_HAVE_VECTORCALL, /* tp_flags */ + Py_TPFLAGS_HAVE_VECTORCALL | + Py_TPFLAGS_ITEMS_AT_END, /* tp_flags */ type_doc, /* tp_doc */ (traverseproc)type_traverse, /* tp_traverse */ (inquiry)type_clear, /* tp_clear */ @@ -5380,7 +5767,8 @@ _PyType_GetSlotNames(PyTypeObject *cls) assert(PyType_Check(cls)); /* Get the slot names from the cache in the class if possible. 
*/ - slotnames = PyDict_GetItemWithError(cls->tp_dict, &_Py_ID(__slotnames__)); + PyObject *dict = lookup_tp_dict(cls); + slotnames = PyDict_GetItemWithError(dict, &_Py_ID(__slotnames__)); if (slotnames != NULL) { if (slotnames != Py_None && !PyList_Check(slotnames)) { PyErr_Format(PyExc_TypeError, @@ -5880,8 +6268,8 @@ object___reduce_ex___impl(PyObject *self, int protocol) PyObject *reduce, *res; if (objreduce == NULL) { - objreduce = PyDict_GetItemWithError( - PyBaseObject_Type.tp_dict, &_Py_ID(__reduce__)); + PyObject *dict = lookup_tp_dict(&PyBaseObject_Type); + objreduce = PyDict_GetItemWithError(dict, &_Py_ID(__reduce__)); if (objreduce == NULL && PyErr_Occurred()) { return NULL; } @@ -6147,11 +6535,12 @@ type_add_method(PyTypeObject *type, PyMethodDef *meth) } int err; + PyObject *dict = lookup_tp_dict(type); if (!(meth->ml_flags & METH_COEXIST)) { - err = PyDict_SetDefault(type->tp_dict, name, descr) == NULL; + err = PyDict_SetDefault(dict, name, descr) == NULL; } else { - err = PyDict_SetItem(type->tp_dict, name, descr) < 0; + err = PyDict_SetItem(dict, name, descr) < 0; } if (!isdescr) { Py_DECREF(name); @@ -6190,7 +6579,7 @@ type_add_members(PyTypeObject *type) return 0; } - PyObject *dict = type->tp_dict; + PyObject *dict = lookup_tp_dict(type); for (; memb->name != NULL; memb++) { PyObject *descr = PyDescr_NewMember(type, memb); if (descr == NULL) @@ -6214,7 +6603,7 @@ type_add_getset(PyTypeObject *type) return 0; } - PyObject *dict = type->tp_dict; + PyObject *dict = lookup_tp_dict(type); for (; gsp->name != NULL; gsp++) { PyObject *descr = PyDescr_NewGetSet(type, gsp); if (descr == NULL) { @@ -6285,15 +6674,20 @@ inherit_special(PyTypeObject *type, PyTypeObject *base) else if (PyType_IsSubtype(base, &PyDict_Type)) { type->tp_flags |= Py_TPFLAGS_DICT_SUBCLASS; } + + /* Setup some inheritable flags */ if (PyType_HasFeature(base, _Py_TPFLAGS_MATCH_SELF)) { type->tp_flags |= _Py_TPFLAGS_MATCH_SELF; } + if (PyType_HasFeature(base, Py_TPFLAGS_ITEMS_AT_END)) { + type->tp_flags |= Py_TPFLAGS_ITEMS_AT_END; + } } static int overrides_hash(PyTypeObject *type) { - PyObject *dict = type->tp_dict; + PyObject *dict = lookup_tp_dict(type); assert(dict != NULL); int r = PyDict_Contains(dict, &_Py_ID(__eq__)); @@ -6537,6 +6931,10 @@ type_ready_pre_checks(PyTypeObject *type) static int type_ready_set_bases(PyTypeObject *type) { + if (lookup_tp_bases(type) != NULL) { + return 0; + } + /* Initialize tp_base (defaults to BaseObject unless that's us) */ PyTypeObject *base = type->tp_base; if (base == NULL && type != &PyBaseObject_Type) { @@ -6572,7 +6970,7 @@ type_ready_set_bases(PyTypeObject *type) } /* Initialize tp_bases */ - PyObject *bases = type->tp_bases; + PyObject *bases = lookup_tp_bases(type); if (bases == NULL) { PyTypeObject *base = type->tp_base; if (base == NULL) { @@ -6584,7 +6982,7 @@ type_ready_set_bases(PyTypeObject *type) if (bases == NULL) { return -1; } - type->tp_bases = bases; + set_tp_bases(type, bases); } return 0; } @@ -6593,7 +6991,7 @@ type_ready_set_bases(PyTypeObject *type) static int type_ready_set_dict(PyTypeObject *type) { - if (type->tp_dict != NULL) { + if (lookup_tp_dict(type) != NULL) { return 0; } @@ -6601,7 +6999,7 @@ type_ready_set_dict(PyTypeObject *type) if (dict == NULL) { return -1; } - type->tp_dict = dict; + set_tp_dict(type, dict); return 0; } @@ -6611,7 +7009,8 @@ type_ready_set_dict(PyTypeObject *type) static int type_dict_set_doc(PyTypeObject *type) { - int r = PyDict_Contains(type->tp_dict, &_Py_ID(__doc__)); + PyObject *dict = 
lookup_tp_dict(type); + int r = PyDict_Contains(dict, &_Py_ID(__doc__)); if (r < 0) { return -1; } @@ -6627,14 +7026,14 @@ type_dict_set_doc(PyTypeObject *type) return -1; } - if (PyDict_SetItem(type->tp_dict, &_Py_ID(__doc__), doc) < 0) { + if (PyDict_SetItem(dict, &_Py_ID(__doc__), doc) < 0) { Py_DECREF(doc); return -1; } Py_DECREF(doc); } else { - if (PyDict_SetItem(type->tp_dict, &_Py_ID(__doc__), Py_None) < 0) { + if (PyDict_SetItem(dict, &_Py_ID(__doc__), Py_None) < 0) { return -1; } } @@ -6699,14 +7098,14 @@ type_ready_mro(PyTypeObject *type) if (mro_internal(type, NULL) < 0) { return -1; } - assert(type->tp_mro != NULL); - assert(PyTuple_Check(type->tp_mro)); + PyObject *mro = lookup_tp_mro(type); + assert(mro != NULL); + assert(PyTuple_Check(mro)); /* All bases of statically allocated type should be statically allocated, and static builtin types must have static builtin bases. */ if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { assert(type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE); - PyObject *mro = type->tp_mro; Py_ssize_t n = PyTuple_GET_SIZE(mro); for (Py_ssize_t i = 0; i < n; i++) { PyTypeObject *base = _PyType_CAST(PyTuple_GET_ITEM(mro, i)); @@ -6767,8 +7166,8 @@ type_ready_inherit(PyTypeObject *type) } // Inherit slots - PyObject *mro = type->tp_mro; - Py_ssize_t n = PyTuple_GET_SIZE(type->tp_mro); + PyObject *mro = lookup_tp_mro(type); + Py_ssize_t n = PyTuple_GET_SIZE(mro); for (Py_ssize_t i = 1; i < n; i++) { PyObject *b = PyTuple_GET_ITEM(mro, i); if (PyType_Check(b)) { @@ -6813,7 +7212,8 @@ type_ready_set_hash(PyTypeObject *type) return 0; } - int r = PyDict_Contains(type->tp_dict, &_Py_ID(__hash__)); + PyObject *dict = lookup_tp_dict(type); + int r = PyDict_Contains(dict, &_Py_ID(__hash__)); if (r < 0) { return -1; } @@ -6821,7 +7221,7 @@ type_ready_set_hash(PyTypeObject *type) return 0; } - if (PyDict_SetItem(type->tp_dict, &_Py_ID(__hash__), Py_None) < 0) { + if (PyDict_SetItem(dict, &_Py_ID(__hash__), Py_None) < 0) { return -1; } type->tp_hash = PyObject_HashNotImplemented; @@ -6833,7 +7233,7 @@ type_ready_set_hash(PyTypeObject *type) static int type_ready_add_subclasses(PyTypeObject *type) { - PyObject *bases = type->tp_bases; + PyObject *bases = lookup_tp_bases(type); Py_ssize_t nbase = PyTuple_GET_SIZE(bases); for (Py_ssize_t i = 0; i < nbase; i++) { PyObject *b = PyTuple_GET_ITEM(bases, i); @@ -6848,7 +7248,7 @@ type_ready_add_subclasses(PyTypeObject *type) // Set tp_new and the "__new__" key in the type dictionary. // Use the Py_TPFLAGS_DISALLOW_INSTANTIATION flag. static int -type_ready_set_new(PyTypeObject *type) +type_ready_set_new(PyTypeObject *type, int rerunbuiltin) { PyTypeObject *base = type->tp_base; /* The condition below could use some explanation. @@ -6870,10 +7270,12 @@ type_ready_set_new(PyTypeObject *type) if (!(type->tp_flags & Py_TPFLAGS_DISALLOW_INSTANTIATION)) { if (type->tp_new != NULL) { - // If "__new__" key does not exists in the type dictionary, - // set it to tp_new_wrapper(). - if (add_tp_new_wrapper(type) < 0) { - return -1; + if (!rerunbuiltin || base == NULL || type->tp_new != base->tp_new) { + // If "__new__" key does not exists in the type dictionary, + // set it to tp_new_wrapper(). 
+ if (add_tp_new_wrapper(type) < 0) { + return -1; + } } } else { @@ -6938,7 +7340,8 @@ type_ready_post_checks(PyTypeObject *type) else if (type->tp_dictoffset < (Py_ssize_t)sizeof(PyObject)) { if (type->tp_dictoffset + type->tp_basicsize <= 0) { PyErr_Format(PyExc_SystemError, - "type %s has a tp_dictoffset that is too small"); + "type %s has a tp_dictoffset that is too small", + type->tp_name); } } return 0; @@ -6946,11 +7349,10 @@ type_ready_post_checks(PyTypeObject *type) static int -type_ready(PyTypeObject *type) +type_ready(PyTypeObject *type, int rerunbuiltin) { - _PyObject_ASSERT((PyObject *)type, - (type->tp_flags & Py_TPFLAGS_READYING) == 0); - type->tp_flags |= Py_TPFLAGS_READYING; + _PyObject_ASSERT((PyObject *)type, !is_readying(type)); + start_readying(type); if (type_ready_pre_checks(type) < 0) { goto error; @@ -6975,17 +7377,19 @@ type_ready(PyTypeObject *type) if (type_ready_mro(type) < 0) { goto error; } - if (type_ready_set_new(type) < 0) { + if (type_ready_set_new(type, rerunbuiltin) < 0) { goto error; } if (type_ready_fill_dict(type) < 0) { goto error; } - if (type_ready_inherit(type) < 0) { - goto error; - } - if (type_ready_preheader(type) < 0) { - goto error; + if (!rerunbuiltin) { + if (type_ready_inherit(type) < 0) { + goto error; + } + if (type_ready_preheader(type) < 0) { + goto error; + } } if (type_ready_set_hash(type) < 0) { goto error; @@ -6993,21 +7397,24 @@ type_ready(PyTypeObject *type) if (type_ready_add_subclasses(type) < 0) { goto error; } - if (type_ready_managed_dict(type) < 0) { - goto error; - } - if (type_ready_post_checks(type) < 0) { - goto error; + if (!rerunbuiltin) { + if (type_ready_managed_dict(type) < 0) { + goto error; + } + if (type_ready_post_checks(type) < 0) { + goto error; + } } /* All done -- set the ready flag */ - type->tp_flags = (type->tp_flags & ~Py_TPFLAGS_READYING) | Py_TPFLAGS_READY; + type->tp_flags = type->tp_flags | Py_TPFLAGS_READY; + stop_readying(type); assert(_PyType_CheckConsistency(type)); return 0; error: - type->tp_flags &= ~Py_TPFLAGS_READYING; + stop_readying(type); return -1; } @@ -7025,67 +7432,44 @@ PyType_Ready(PyTypeObject *type) type->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; } - return type_ready(type); + return type_ready(type, 0); } int -_PyStaticType_InitBuiltin(PyTypeObject *self) +_PyStaticType_InitBuiltin(PyInterpreterState *interp, PyTypeObject *self) { + assert(_Py_IsImmortal((PyObject *)self)); assert(!(self->tp_flags & Py_TPFLAGS_HEAPTYPE)); + assert(!(self->tp_flags & Py_TPFLAGS_MANAGED_DICT)); + assert(!(self->tp_flags & Py_TPFLAGS_MANAGED_WEAKREF)); - if (self->tp_flags & Py_TPFLAGS_READY) { - assert(self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); - assert(_PyType_CheckConsistency(self)); - return 0; - } + int ismain = _Py_IsMainInterpreter(interp); + if ((self->tp_flags & Py_TPFLAGS_READY) == 0) { + assert(ismain); - self->tp_flags |= _Py_TPFLAGS_STATIC_BUILTIN; - self->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; + self->tp_flags |= _Py_TPFLAGS_STATIC_BUILTIN; + self->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE; - assert(NEXT_GLOBAL_VERSION_TAG <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG); - self->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++; - self->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; + assert(NEXT_GLOBAL_VERSION_TAG <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG); + self->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++; + self->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; + } + else { + assert(!ismain); + assert(self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN); + assert(self->tp_flags & Py_TPFLAGS_VALID_VERSION_TAG); + } - 
static_builtin_state_init(self); + static_builtin_state_init(interp, self); - int res = type_ready(self); + int res = type_ready(self, !ismain); if (res < 0) { - static_builtin_state_clear(self); + static_builtin_state_clear(interp, self); } return res; } -static PyObject * -init_subclasses(PyTypeObject *self) -{ - PyObject *subclasses = PyDict_New(); - if (subclasses == NULL) { - return NULL; - } - if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { - static_builtin_state *state = _PyStaticType_GetState(self); - state->tp_subclasses = subclasses; - return subclasses; - } - self->tp_subclasses = (void *)subclasses; - return subclasses; -} - -static void -clear_subclasses(PyTypeObject *self) -{ - /* Delete the dictionary to save memory. _PyStaticType_Dealloc() - callers also test if tp_subclasses is NULL to check if a static type - has no subclass. */ - if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) { - static_builtin_state *state = _PyStaticType_GetState(self); - Py_CLEAR(state->tp_subclasses); - return; - } - Py_CLEAR(self->tp_subclasses); -} - static int add_subclass(PyTypeObject *base, PyTypeObject *type) { @@ -7102,9 +7486,9 @@ add_subclass(PyTypeObject *base, PyTypeObject *type) // Only get tp_subclasses after creating the key and value. // PyWeakref_NewRef() can trigger a garbage collection which can execute // arbitrary Python code and so modify base->tp_subclasses. - PyObject *subclasses = lookup_subclasses(base); + PyObject *subclasses = lookup_tp_subclasses(base); if (subclasses == NULL) { - subclasses = init_subclasses(base); + subclasses = init_tp_subclasses(base); if (subclasses == NULL) { Py_DECREF(key); Py_DECREF(ref); @@ -7135,19 +7519,6 @@ add_all_subclasses(PyTypeObject *type, PyObject *bases) return res; } -static inline PyTypeObject * -subclass_from_ref(PyObject *ref) -{ - assert(PyWeakref_CheckRef(ref)); - PyObject *obj = PyWeakref_GET_OBJECT(ref); // borrowed ref - assert(obj != NULL); - if (obj == Py_None) { - return NULL; - } - assert(PyType_Check(obj)); - return _PyType_CAST(obj); -} - static PyObject * get_subclasses_key(PyTypeObject *type, PyTypeObject *base) { @@ -7161,10 +7532,10 @@ get_subclasses_key(PyTypeObject *type, PyTypeObject *base) We fall back to manually traversing the values. */ Py_ssize_t i = 0; PyObject *ref; // borrowed ref - PyObject *subclasses = lookup_subclasses(base); + PyObject *subclasses = lookup_tp_subclasses(base); if (subclasses != NULL) { while (PyDict_Next(subclasses, &i, &key, &ref)) { - PyTypeObject *subclass = subclass_from_ref(ref); // borrowed + PyTypeObject *subclass = type_from_ref(ref); // borrowed if (subclass == type) { return Py_NewRef(key); } @@ -7177,7 +7548,7 @@ get_subclasses_key(PyTypeObject *type, PyTypeObject *base) static void remove_subclass(PyTypeObject *base, PyTypeObject *type) { - PyObject *subclasses = lookup_subclasses(base); // borrowed ref + PyObject *subclasses = lookup_tp_subclasses(base); // borrowed ref if (subclasses == NULL) { return; } @@ -7193,7 +7564,7 @@ remove_subclass(PyTypeObject *base, PyTypeObject *type) Py_XDECREF(key); if (PyDict_Size(subclasses) == 0) { - clear_subclasses(base); + clear_tp_subclasses(base); } } @@ -7491,7 +7862,7 @@ static int hackcheck(PyObject *self, setattrofunc func, const char *what) { PyTypeObject *type = Py_TYPE(self); - PyObject *mro = type->tp_mro; + PyObject *mro = lookup_tp_mro(type); if (!mro) { /* Probably ok not to check the call in this case. 
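/* ----------------------------------------------------------------------
 * Editor's note: illustrative sketch, not part of this patch.  With
 * _PyStaticType_InitBuiltin() and _PyStaticType_Dealloc() now taking the
 * interpreter, a builtin module sets up and tears down its static types
 * once per interpreter instead of only in the main one.  Foo_Type and the
 * _PyFoo_* helpers are hypothetical (initializers omitted); the
 * unicodeobject.c hunks later in this patch show the same pattern on
 * real types.
 * ---------------------------------------------------------------------- */
static PyTypeObject Foo_Type;         /* hypothetical static builtin type */

PyStatus
_PyFoo_InitTypes(PyInterpreterState *interp)
{
    if (_PyStaticType_InitBuiltin(interp, &Foo_Type) < 0) {
        return _PyStatus_ERR("can't initialize Foo type");
    }
    return _PyStatus_OK();
}

void
_PyFoo_FiniTypes(PyInterpreterState *interp)
{
    /* No _Py_IsMainInterpreter() guard any more: every interpreter clears
       its own per-type state (dict, bases, mro, subclasses). */
    _PyStaticType_Dealloc(interp, &Foo_Type);
}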
*/ return 1; @@ -7694,6 +8065,63 @@ wrap_descr_delete(PyObject *self, PyObject *args, void *wrapped) Py_RETURN_NONE; } +static PyObject * +wrap_buffer(PyObject *self, PyObject *args, void *wrapped) +{ + PyObject *arg = NULL; + + if (!PyArg_UnpackTuple(args, "", 1, 1, &arg)) { + return NULL; + } + Py_ssize_t flags = PyNumber_AsSsize_t(arg, PyExc_OverflowError); + if (flags == -1 && PyErr_Occurred()) { + return NULL; + } + if (flags > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, + "buffer flags too large"); + return NULL; + } + + return _PyMemoryView_FromBufferProc(self, Py_SAFE_DOWNCAST(flags, Py_ssize_t, int), + (getbufferproc)wrapped); +} + +static PyObject * +wrap_releasebuffer(PyObject *self, PyObject *args, void *wrapped) +{ + PyObject *arg = NULL; + if (!PyArg_UnpackTuple(args, "", 1, 1, &arg)) { + return NULL; + } + if (!PyMemoryView_Check(arg)) { + PyErr_SetString(PyExc_TypeError, + "expected a memoryview object"); + return NULL; + } + PyMemoryViewObject *mview = (PyMemoryViewObject *)arg; + if (mview->view.obj == NULL) { + // Already released, ignore + Py_RETURN_NONE; + } + if (mview->view.obj != self) { + PyErr_SetString(PyExc_ValueError, + "memoryview's buffer is not this object"); + return NULL; + } + if (mview->flags & _Py_MEMORYVIEW_RELEASED) { + PyErr_SetString(PyExc_ValueError, + "memoryview's buffer has already been released"); + return NULL; + } + PyObject *res = PyObject_CallMethodNoArgs((PyObject *)mview, &_Py_ID(release)); + if (res == NULL) { + return NULL; + } + Py_DECREF(res); + Py_RETURN_NONE; +} + static PyObject * wrap_init(PyObject *self, PyObject *args, void *wrapped, PyObject *kwds) { @@ -7779,7 +8207,8 @@ static struct PyMethodDef tp_new_methoddef[] = { static int add_tp_new_wrapper(PyTypeObject *type) { - int r = PyDict_Contains(type->tp_dict, &_Py_ID(__new__)); + PyObject *dict = lookup_tp_dict(type); + int r = PyDict_Contains(dict, &_Py_ID(__new__)); if (r > 0) { return 0; } @@ -7791,7 +8220,7 @@ add_tp_new_wrapper(PyTypeObject *type) if (func == NULL) { return -1; } - r = PyDict_SetItem(type->tp_dict, &_Py_ID(__new__), func); + r = PyDict_SetItem(dict, &_Py_ID(__new__), func); Py_DECREF(func); return r; } @@ -8529,6 +8958,227 @@ slot_tp_finalize(PyObject *self) PyErr_SetRaisedException(exc); } +typedef struct _PyBufferWrapper { + PyObject_HEAD + PyObject *mv; + PyObject *obj; +} PyBufferWrapper; + +static int +bufferwrapper_traverse(PyBufferWrapper *self, visitproc visit, void *arg) +{ + Py_VISIT(self->mv); + Py_VISIT(self->obj); + return 0; +} + +static void +bufferwrapper_dealloc(PyObject *self) +{ + PyBufferWrapper *bw = (PyBufferWrapper *)self; + + _PyObject_GC_UNTRACK(self); + Py_XDECREF(bw->mv); + Py_XDECREF(bw->obj); + Py_TYPE(self)->tp_free(self); +} + +static void +bufferwrapper_releasebuf(PyObject *self, Py_buffer *view) +{ + PyBufferWrapper *bw = (PyBufferWrapper *)self; + + if (bw->mv == NULL || bw->obj == NULL) { + // Already released + return; + } + + PyObject *mv = bw->mv; + PyObject *obj = bw->obj; + + assert(PyMemoryView_Check(mv)); + Py_TYPE(mv)->tp_as_buffer->bf_releasebuffer(mv, view); + // We only need to call bf_releasebuffer if it's a Python function. If it's a C + // bf_releasebuf, it will be called when the memoryview is released. 
+ if (((PyMemoryViewObject *)mv)->view.obj != obj + && Py_TYPE(obj)->tp_as_buffer != NULL + && Py_TYPE(obj)->tp_as_buffer->bf_releasebuffer == slot_bf_releasebuffer) { + releasebuffer_call_python(obj, view); + } + + Py_CLEAR(bw->mv); + Py_CLEAR(bw->obj); +} + +static PyBufferProcs bufferwrapper_as_buffer = { + .bf_releasebuffer = bufferwrapper_releasebuf, +}; + + +PyTypeObject _PyBufferWrapper_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + .tp_name = "_buffer_wrapper", + .tp_basicsize = sizeof(PyBufferWrapper), + .tp_alloc = PyType_GenericAlloc, + .tp_free = PyObject_GC_Del, + .tp_traverse = (traverseproc)bufferwrapper_traverse, + .tp_dealloc = bufferwrapper_dealloc, + .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, + .tp_as_buffer = &bufferwrapper_as_buffer, +}; + +static int +slot_bf_getbuffer(PyObject *self, Py_buffer *buffer, int flags) +{ + PyObject *flags_obj = PyLong_FromLong(flags); + if (flags_obj == NULL) { + return -1; + } + PyBufferWrapper *wrapper = NULL; + PyObject *stack[2] = {self, flags_obj}; + PyObject *ret = vectorcall_method(&_Py_ID(__buffer__), stack, 2); + if (ret == NULL) { + goto fail; + } + if (!PyMemoryView_Check(ret)) { + PyErr_Format(PyExc_TypeError, + "__buffer__ returned non-memoryview object"); + goto fail; + } + + if (PyObject_GetBuffer(ret, buffer, flags) < 0) { + goto fail; + } + assert(buffer->obj == ret); + + wrapper = PyObject_GC_New(PyBufferWrapper, &_PyBufferWrapper_Type); + if (wrapper == NULL) { + goto fail; + } + wrapper->mv = ret; + wrapper->obj = Py_NewRef(self); + _PyObject_GC_TRACK(wrapper); + + buffer->obj = (PyObject *)wrapper; + Py_DECREF(ret); + Py_DECREF(flags_obj); + return 0; + +fail: + Py_XDECREF(wrapper); + Py_XDECREF(ret); + Py_DECREF(flags_obj); + return -1; +} + +static int +releasebuffer_maybe_call_super(PyObject *self, Py_buffer *buffer) +{ + PyTypeObject *self_type = Py_TYPE(self); + PyObject *mro = lookup_tp_mro(self_type); + if (mro == NULL) { + return -1; + } + + assert(PyTuple_Check(mro)); + Py_ssize_t n = PyTuple_GET_SIZE(mro); + Py_ssize_t i; + + /* No need to check the last one: it's gonna be skipped anyway. */ + for (i = 0; i < n -1; i++) { + if ((PyObject *)(self_type) == PyTuple_GET_ITEM(mro, i)) + break; + } + i++; /* skip self_type */ + if (i >= n) + return -1; + + releasebufferproc base_releasebuffer = NULL; + for (; i < n; i++) { + PyObject *obj = PyTuple_GET_ITEM(mro, i); + if (!PyType_Check(obj)) { + continue; + } + PyTypeObject *base_type = (PyTypeObject *)obj; + if (base_type->tp_as_buffer != NULL + && base_type->tp_as_buffer->bf_releasebuffer != NULL + && base_type->tp_as_buffer->bf_releasebuffer != slot_bf_releasebuffer) { + base_releasebuffer = base_type->tp_as_buffer->bf_releasebuffer; + break; + } + } + + if (base_releasebuffer != NULL) { + base_releasebuffer(self, buffer); + } + return 0; +} + +static void +releasebuffer_call_python(PyObject *self, Py_buffer *buffer) +{ + PyObject *mv; + bool is_buffer_wrapper = Py_TYPE(buffer->obj) == &_PyBufferWrapper_Type; + if (is_buffer_wrapper) { + // Make sure we pass the same memoryview to + // __release_buffer__() that __buffer__() returned. + PyBufferWrapper *bw = (PyBufferWrapper *)buffer->obj; + if (bw->mv == NULL) { + return; + } + mv = Py_NewRef(bw->mv); + } + else { + // This means we are not dealing with a memoryview returned + // from a Python __buffer__ function. 
+ mv = PyMemoryView_FromBuffer(buffer); + if (mv == NULL) { + PyErr_WriteUnraisable(self); + return; + } + // Set the memoryview to restricted mode, which forbids + // users from saving any reference to the underlying buffer + // (e.g., by doing .cast()). This is necessary to ensure + // no Python code retains a reference to the to-be-released + // buffer. + ((PyMemoryViewObject *)mv)->flags |= _Py_MEMORYVIEW_RESTRICTED; + } + PyObject *stack[2] = {self, mv}; + PyObject *ret = vectorcall_method(&_Py_ID(__release_buffer__), stack, 2); + if (ret == NULL) { + PyErr_WriteUnraisable(self); + } + else { + Py_DECREF(ret); + } + if (!is_buffer_wrapper) { + PyObject_CallMethodNoArgs(mv, &_Py_ID(release)); + } + Py_DECREF(mv); +} + +/* + * bf_releasebuffer is very delicate, because we need to ensure that + * C bf_releasebuffer slots are called correctly (or we'll leak memory), + * but we cannot trust any __release_buffer__ implemented in Python to + * do so correctly. Therefore, if a base class has a C bf_releasebuffer + * slot, we call it directly here. That is safe because this function + * only gets called from C callers of the bf_releasebuffer slot. Python + * code that calls __release_buffer__ directly instead goes through + * wrap_releasebuffer(), which doesn't call the bf_releasebuffer slot + * directly but instead simply releases the associated memoryview. + */ +static void +slot_bf_releasebuffer(PyObject *self, Py_buffer *buffer) +{ + releasebuffer_call_python(self, buffer); + if (releasebuffer_maybe_call_super(self, buffer) < 0) { + if (PyErr_Occurred()) { + PyErr_WriteUnraisable(self); + } + } +} + static PyObject * slot_am_await(PyObject *self) { @@ -8596,6 +9246,7 @@ an all-zero entry. #undef TPSLOT #undef FLSLOT +#undef BUFSLOT #undef AMSLOT #undef ETSLOT #undef SQSLOT @@ -8615,6 +9266,8 @@ an all-zero entry. #define ETSLOT(NAME, SLOT, FUNCTION, WRAPPER, DOC) \ {#NAME, offsetof(PyHeapTypeObject, SLOT), (void *)(FUNCTION), WRAPPER, \ PyDoc_STR(DOC), .name_strobj = &_Py_ID(NAME) } +#define BUFSLOT(NAME, SLOT, FUNCTION, WRAPPER, DOC) \ + ETSLOT(NAME, as_buffer.SLOT, FUNCTION, WRAPPER, DOC) #define AMSLOT(NAME, SLOT, FUNCTION, WRAPPER, DOC) \ ETSLOT(NAME, as_async.SLOT, FUNCTION, WRAPPER, DOC) #define SQSLOT(NAME, SLOT, FUNCTION, WRAPPER, DOC) \ @@ -8696,6 +9349,13 @@ static pytype_slotdef slotdefs[] = { "Create and return new object. See help(type) for accurate signature."), TPSLOT(__del__, tp_finalize, slot_tp_finalize, (wrapperfunc)wrap_del, ""), + BUFSLOT(__buffer__, bf_getbuffer, slot_bf_getbuffer, wrap_buffer, + "__buffer__($self, flags, /)\n--\n\n" + "Return a buffer object that exposes the underlying memory of the object."), + BUFSLOT(__release_buffer__, bf_releasebuffer, slot_bf_releasebuffer, wrap_releasebuffer, + "__release_buffer__($self, /)\n--\n\n" + "Release the buffer object that exposes the underlying memory of the object."), + AMSLOT(__await__, am_await, slot_am_await, wrap_unaryfunc, "__await__($self, /)\n--\n\nReturn an iterator to be used in await expression."), AMSLOT(__aiter__, am_aiter, slot_am_aiter, wrap_unaryfunc, @@ -8842,8 +9502,12 @@ slotptr(PyTypeObject *type, int ioffset) /* Note: this depends on the order of the members of PyHeapTypeObject! 
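/* ----------------------------------------------------------------------
 * Editor's note: illustrative sketch, not part of this patch.  The
 * slot_bf_getbuffer()/slot_bf_releasebuffer() machinery above lets a
 * class written in Python implement __buffer__ and __release_buffer__,
 * while C consumers keep using the ordinary buffer API unchanged.
 * consume_buffer() below is a hypothetical consumer, shown only to make
 * the control flow concrete.
 * ---------------------------------------------------------------------- */
#include <Python.h>

static int
consume_buffer(PyObject *obj)
{
    Py_buffer view;
    /* If Py_TYPE(obj) filled bf_getbuffer from a Python __buffer__ method,
       this call lands in slot_bf_getbuffer() above and view.obj ends up
       being the _PyBufferWrapper that keeps the returned memoryview alive. */
    if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) < 0) {
        return -1;
    }
    /* ... use view.buf / view.len ... */

    /* Release goes through bufferwrapper_releasebuf(), which forwards to a
       Python-level __release_buffer__ only when the object defines one. */
    PyBuffer_Release(&view);
    return 0;
}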
*/ assert(offset >= 0); - assert((size_t)offset < offsetof(PyHeapTypeObject, as_buffer)); - if ((size_t)offset >= offsetof(PyHeapTypeObject, as_sequence)) { + assert((size_t)offset < offsetof(PyHeapTypeObject, ht_name)); + if ((size_t)offset >= offsetof(PyHeapTypeObject, as_buffer)) { + ptr = (char *)type->tp_as_buffer; + offset -= offsetof(PyHeapTypeObject, as_buffer); + } + else if ((size_t)offset >= offsetof(PyHeapTypeObject, as_sequence)) { ptr = (char *)type->tp_as_sequence; offset -= offsetof(PyHeapTypeObject, as_sequence); } @@ -9155,7 +9819,8 @@ update_all_slots(PyTypeObject* type) static int type_new_set_names(PyTypeObject *type) { - PyObject *names_to_set = PyDict_Copy(type->tp_dict); + PyObject *dict = lookup_tp_dict(type); + PyObject *names_to_set = PyDict_Copy(dict); if (names_to_set == NULL) { return -1; } @@ -9244,7 +9909,7 @@ recurse_down_subclasses(PyTypeObject *type, PyObject *attr_name, // It is safe to use a borrowed reference because update_subclasses() is // only used with update_slots_callback() which doesn't modify // tp_subclasses. - PyObject *subclasses = lookup_subclasses(type); // borrowed ref + PyObject *subclasses = lookup_tp_subclasses(type); // borrowed ref if (subclasses == NULL) { return 0; } @@ -9253,13 +9918,13 @@ recurse_down_subclasses(PyTypeObject *type, PyObject *attr_name, Py_ssize_t i = 0; PyObject *ref; while (PyDict_Next(subclasses, &i, NULL, &ref)) { - PyTypeObject *subclass = subclass_from_ref(ref); // borrowed + PyTypeObject *subclass = type_from_ref(ref); // borrowed if (subclass == NULL) { continue; } /* Avoid recursing down into unaffected classes */ - PyObject *dict = subclass->tp_dict; + PyObject *dict = lookup_tp_dict(subclass); if (dict != NULL && PyDict_Check(dict)) { int r = PyDict_Contains(dict, attr_name); if (r < 0) { @@ -9310,7 +9975,7 @@ recurse_down_subclasses(PyTypeObject *type, PyObject *attr_name, static int add_operators(PyTypeObject *type) { - PyObject *dict = type->tp_dict; + PyObject *dict = lookup_tp_dict(type); pytype_slotdef *p; PyObject *descr; void **ptr; @@ -9406,7 +10071,7 @@ _super_lookup_descr(PyTypeObject *su_type, PyTypeObject *su_obj_type, PyObject * PyObject *mro, *res; Py_ssize_t i, n; - mro = su_obj_type->tp_mro; + mro = lookup_tp_mro(su_obj_type); if (mro == NULL) return NULL; @@ -9427,7 +10092,7 @@ _super_lookup_descr(PyTypeObject *su_type, PyTypeObject *su_obj_type, PyObject * Py_INCREF(mro); do { PyObject *obj = PyTuple_GET_ITEM(mro, i); - PyObject *dict = _PyType_CAST(obj)->tp_dict; + PyObject *dict = lookup_tp_dict(_PyType_CAST(obj)); assert(dict != NULL && PyDict_Check(dict)); res = PyDict_GetItemWithError(dict, name); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 7537c12e92680c..7726f2fb17afde 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -13452,8 +13452,6 @@ formatfloat(PyObject *v, struct unicode_format_arg_t *arg, if (arg->flags & F_ALT) dtoa_flags |= Py_DTSF_ALT; - if (arg->flags & F_NO_NEG_0) - dtoa_flags |= Py_DTSF_NO_NEG_0; p = PyOS_double_to_string(x, arg->ch, prec, dtoa_flags, NULL); if (p == NULL) return -1; @@ -14573,13 +14571,13 @@ _PyUnicode_InitGlobalObjects(PyInterpreterState *interp) PyStatus _PyUnicode_InitTypes(PyInterpreterState *interp) { - if (_PyStaticType_InitBuiltin(&EncodingMapType) < 0) { + if (_PyStaticType_InitBuiltin(interp, &EncodingMapType) < 0) { goto error; } - if (_PyStaticType_InitBuiltin(&PyFieldNameIter_Type) < 0) { + if (_PyStaticType_InitBuiltin(interp, &PyFieldNameIter_Type) < 0) { goto error; } - if 
(_PyStaticType_InitBuiltin(&PyFormatterIter_Type) < 0) { + if (_PyStaticType_InitBuiltin(interp, &PyFormatterIter_Type) < 0) { goto error; } return _PyStatus_OK(); @@ -15158,13 +15156,9 @@ unicode_is_finalizing(void) void _PyUnicode_FiniTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - - _PyStaticType_Dealloc(&EncodingMapType); - _PyStaticType_Dealloc(&PyFieldNameIter_Type); - _PyStaticType_Dealloc(&PyFormatterIter_Type); + _PyStaticType_Dealloc(interp, &EncodingMapType); + _PyStaticType_Dealloc(interp, &PyFieldNameIter_Type); + _PyStaticType_Dealloc(interp, &PyFormatterIter_Type); } @@ -15196,12 +15190,18 @@ static PyMethodDef _string_methods[] = { {NULL, NULL} }; +static PyModuleDef_Slot module_slots[] = { + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {0, NULL} +}; + static struct PyModuleDef _string_module = { PyModuleDef_HEAD_INIT, .m_name = "_string", .m_doc = PyDoc_STR("string helper module"), .m_size = 0, .m_methods = _string_methods, + .m_slots = module_slots, }; PyMODINIT_FUNC diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index c1afe63ecf66f6..aee79fc1410b29 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -1017,9 +1017,9 @@ PyObject_ClearWeakRefs(PyObject *object) * or anything else. */ void -_PyStaticType_ClearWeakRefs(PyTypeObject *type) +_PyStaticType_ClearWeakRefs(PyInterpreterState *interp, PyTypeObject *type) { - static_builtin_state *state = _PyStaticType_GetState(type); + static_builtin_state *state = _PyStaticType_GetState(interp, type); PyObject **list = _PyStaticType_GET_WEAKREFS_LISTPTR(state); while (*list != NULL) { /* Note that clear_weakref() pops the first ref off the type's diff --git a/PC/_testconsole.c b/PC/_testconsole.c index f14a2d45b1be26..3221b985d01ba0 100644 --- a/PC/_testconsole.c +++ b/PC/_testconsole.c @@ -31,6 +31,7 @@ static int execfunc(PyObject *m) PyModuleDef_Slot testconsole_slots[] = { {Py_mod_exec, execfunc}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL}, }; diff --git a/PC/msvcrtmodule.c b/PC/msvcrtmodule.c index 090254befc934d..53ef26b732f615 100644 --- a/PC/msvcrtmodule.c +++ b/PC/msvcrtmodule.c @@ -661,6 +661,7 @@ exec_module(PyObject* m) static PyModuleDef_Slot msvcrt_slots[] = { {Py_mod_exec, exec_module}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/PC/pyconfig.h b/PC/pyconfig.h index 8a3bf8968ce29d..3415efe2dea117 100644 --- a/PC/pyconfig.h +++ b/PC/pyconfig.h @@ -330,6 +330,7 @@ Py_NO_ENABLE_SHARED to find out. Also support MS_NO_COREDLL for b/w compat */ # define SIZEOF_HKEY 8 # define SIZEOF_SIZE_T 8 # define ALIGNOF_SIZE_T 8 +# define ALIGNOF_MAX_ALIGN_T 8 /* configure.ac defines HAVE_LARGEFILE_SUPPORT iff sizeof(off_t) > sizeof(long), and sizeof(long long) >= sizeof(off_t). On Win64 the second condition is not true, but if fpos_t replaces off_t @@ -351,6 +352,7 @@ Py_NO_ENABLE_SHARED to find out. 
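/* ----------------------------------------------------------------------
 * Editor's note: illustrative sketch, not part of this patch.  The many
 * one-line additions of {Py_mod_multiple_interpreters,
 * Py_MOD_PER_INTERPRETER_GIL_SUPPORTED} in this patch are a module-level
 * opt-in for running under a per-interpreter GIL.  A minimal hypothetical
 * extension module declaring the same support looks like this; a module
 * that is not ready can pass Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED
 * instead.
 * ---------------------------------------------------------------------- */
#include <Python.h>

static int
example_exec(PyObject *module)
{
    /* Per-interpreter initialisation; keep state on the module object,
       not in C globals, so interpreters stay isolated. */
    return 0;
}

static PyModuleDef_Slot example_slots[] = {
    {Py_mod_exec, example_exec},
    {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED},
    {0, NULL},
};

static struct PyModuleDef example_module = {
    PyModuleDef_HEAD_INIT,
    .m_name = "example",
    .m_doc = "hypothetical module used only for illustration",
    .m_size = 0,
    .m_methods = NULL,
    .m_slots = example_slots,
};

PyMODINIT_FUNC
PyInit_example(void)
{
    return PyModuleDef_Init(&example_module);
}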
Also support MS_NO_COREDLL for b/w compat */ # else # define SIZEOF_TIME_T 4 # endif +# define ALIGNOF_MAX_ALIGN_T 8 #endif #ifdef _DEBUG diff --git a/PC/python3dll.c b/PC/python3dll.c index 706affa18351b3..7e848abccfd1fa 100755 --- a/PC/python3dll.c +++ b/PC/python3dll.c @@ -467,6 +467,7 @@ EXPORT_FUNC(PyObject_GetAttrString) EXPORT_FUNC(PyObject_GetBuffer) EXPORT_FUNC(PyObject_GetItem) EXPORT_FUNC(PyObject_GetIter) +EXPORT_FUNC(PyObject_GetTypeData) EXPORT_FUNC(PyObject_HasAttr) EXPORT_FUNC(PyObject_HasAttrString) EXPORT_FUNC(PyObject_Hash) @@ -618,6 +619,7 @@ EXPORT_FUNC(PyType_GetModuleState) EXPORT_FUNC(PyType_GetName) EXPORT_FUNC(PyType_GetQualName) EXPORT_FUNC(PyType_GetSlot) +EXPORT_FUNC(PyType_GetTypeDataSize) EXPORT_FUNC(PyType_IsSubtype) EXPORT_FUNC(PyType_Modified) EXPORT_FUNC(PyType_Ready) diff --git a/PC/winreg.c b/PC/winreg.c index 4884125c3609ad..e2d5322f458c2a 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -2184,6 +2184,7 @@ exec_module(PyObject *m) static PyModuleDef_Slot winreg_slots[] = { {Py_mod_exec, exec_module}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/PC/winsound.c b/PC/winsound.c index 17ce2ef423b1f9..68a917810f884d 100644 --- a/PC/winsound.c +++ b/PC/winsound.c @@ -235,6 +235,7 @@ exec_module(PyObject *module) static PyModuleDef_Slot sound_slots[] = { {Py_mod_exec, exec_module}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index 439cd687fda61d..350f97f8ff41aa 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -98,6 +98,7 @@ + @@ -109,7 +110,9 @@ + + diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index 0e42e4982c21ff..af80f1eebb3c4d 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -24,6 +24,9 @@ Source Files + + Source Files + Source Files @@ -57,6 +60,9 @@ Source Files + + Source Files + Source Files diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index 128241393f9f09..30ee873af9af24 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -54,7 +54,7 @@ set libraries= set libraries=%libraries% bzip2-1.0.8 if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.4.4 if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1t -set libraries=%libraries% sqlite-3.40.1.0 +set libraries=%libraries% sqlite-3.41.2.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.13.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.13.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tix-8.4.3.6 diff --git a/PCbuild/python.props b/PCbuild/python.props index 7994fbe7cd5e0b..29add07795f900 100644 --- a/PCbuild/python.props +++ b/PCbuild/python.props @@ -68,7 +68,7 @@ - $(ExternalsDir)sqlite-3.40.1.0\ + $(ExternalsDir)sqlite-3.41.2.0\ $(ExternalsDir)bzip2-1.0.8\ $(ExternalsDir)xz-5.2.5\ $(ExternalsDir)libffi-3.4.4\ diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 8aafcb786a6064..28b1517c6f6b3a 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -375,13 +375,16 @@ + + + + - @@ -404,17 +407,15 @@ - - - + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 07476f30833372..75e6fbb13f98ba 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -776,6 +776,15 @@ Modules + + Modules + 
+ + Modules + + + Modules + Modules @@ -794,9 +803,6 @@ Modules - - Modules - Modules @@ -875,16 +881,13 @@ Modules - - Modules - Modules - + Modules - + Modules diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt index 4c799b64c461c1..9df56685b76a87 100644 --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -188,7 +188,7 @@ _ssl again when building. _sqlite3 - Wraps SQLite 3.40.1, which is itself built by sqlite3.vcxproj + Wraps SQLite 3.41.2, which is itself built by sqlite3.vcxproj Homepage: https://www.sqlite.org/ _tkinter diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets index aeb7e2e185d9f8..107066817ba6b0 100644 --- a/PCbuild/regen.targets +++ b/PCbuild/regen.targets @@ -59,7 +59,7 @@ Inputs="@(_OpcodeSources)" Outputs="@(_OpcodeOutputs)" DependsOnTargets="FindPythonForBuild"> - diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index b44e303ac2594b..5d5a05a70ca7ec 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1206,6 +1206,7 @@ def visitModule(self, mod): self.emit(""" static PyModuleDef_Slot astmodule_slots[] = { {Py_mod_exec, astmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 8de0572a1fc459..91ffabac56c7b3 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -43,12 +43,12 @@ #ifdef Py_DEBUG static inline tokenizer_mode* TOK_GET_MODE(struct tok_state* tok) { assert(tok->tok_mode_stack_index >= 0); - assert(tok->tok_mode_stack_index < MAXLEVEL); + assert(tok->tok_mode_stack_index < MAXFSTRINGLEVEL); return &(tok->tok_mode_stack[tok->tok_mode_stack_index]); } static inline tokenizer_mode* TOK_NEXT_MODE(struct tok_state* tok) { assert(tok->tok_mode_stack_index >= 0); - assert(tok->tok_mode_stack_index < MAXLEVEL); + assert(tok->tok_mode_stack_index + 1 < MAXFSTRINGLEVEL); return &(tok->tok_mode_stack[++tok->tok_mode_stack_index]); } #else @@ -1277,6 +1277,12 @@ _syntaxerror_range(struct tok_state *tok, const char *format, int col_offset, int end_col_offset, va_list vargs) { + // In release builds, we don't want to overwrite a previous error, but in debug builds we + // want to fail if we are not doing it so we can fix it. 
+ assert(tok->done != E_ERROR); + if (tok->done == E_ERROR) { + return ERRORTOKEN; + } PyObject *errmsg, *errtext, *args; errmsg = PyUnicode_FromFormatV(format, vargs); if (!errmsg) { @@ -2235,6 +2241,9 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t p_start = tok->start; p_end = tok->cur; + if (tok->tok_mode_stack_index + 1 >= MAXFSTRINGLEVEL) { + return MAKE_TOKEN(syntaxerror(tok, "too many nested f-strings")); + } tokenizer_mode *the_current_tok = TOK_NEXT_MODE(tok); the_current_tok->kind = TOK_FSTRING_MODE; the_current_tok->f_string_quote = quote; @@ -2298,8 +2307,12 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t /* Get rest of string */ while (end_quote_size != quote_size) { c = tok_nextc(tok); - if (tok->done == E_DECODE) + if (tok->done == E_ERROR) { + return MAKE_TOKEN(ERRORTOKEN); + } + if (tok->done == E_DECODE) { break; + } if (c == EOF || (quote_size == 1 && c == '\n')) { assert(tok->multi_line_start != NULL); // shift the tok_state's location into @@ -2551,7 +2564,14 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct while (end_quote_size != current_tok->f_string_quote_size) { int c = tok_nextc(tok); + if (tok->done == E_ERROR) { + return MAKE_TOKEN(ERRORTOKEN); + } if (c == EOF || (current_tok->f_string_quote_size == 1 && c == '\n')) { + if (tok->decoding_erred) { + return MAKE_TOKEN(ERRORTOKEN); + } + assert(tok->multi_line_start != NULL); // shift the tok_state's location into // the start of string, and report the error diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h index 8b4213c4ce3b5a..5e2171885ac75b 100644 --- a/Parser/tokenizer.h +++ b/Parser/tokenizer.h @@ -10,8 +10,9 @@ extern "C" { #include "pycore_token.h" /* For token types */ -#define MAXINDENT 100 /* Max indentation level */ -#define MAXLEVEL 200 /* Max parentheses level */ +#define MAXINDENT 100 /* Max indentation level */ +#define MAXLEVEL 200 /* Max parentheses level */ +#define MAXFSTRINGLEVEL 150 /* Max f-string nesting level */ enum decoding_state { STATE_INIT, @@ -123,7 +124,7 @@ struct tok_state { enum interactive_underflow_t interactive_underflow; int report_warnings; // TODO: Factor this into its own thing - tokenizer_mode tok_mode_stack[MAXLEVEL]; + tokenizer_mode tok_mode_stack[MAXFSTRINGLEVEL]; int tok_mode_stack_index; int tok_report_warnings; #ifdef Py_DEBUG diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 6c878474afb192..81ab71c0fc3b29 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -12193,6 +12193,7 @@ astmodule_exec(PyObject *m) static PyModuleDef_Slot astmodule_slots[] = { {Py_mod_exec, astmodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c index 416dc5971bca3d..3394a5108cb535 100644 --- a/Python/Python-tokenize.c +++ b/Python/Python-tokenize.c @@ -151,6 +151,7 @@ static PyMethodDef tokenize_methods[] = { static PyModuleDef_Slot tokenizemodule_slots[] = { {Py_mod_exec, tokenizemodule_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Python/_warnings.c b/Python/_warnings.c index d510381c365b66..5644db9a3770cb 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -1449,6 +1449,7 @@ warnings_module_exec(PyObject *module) static PyModuleDef_Slot warnings_slots[] = { {Py_mod_exec, warnings_module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff 
--git a/Python/assemble.c b/Python/assemble.c index 369dd8dcde9b9b..6889831ae3fe0c 100644 --- a/Python/assemble.c +++ b/Python/assemble.c @@ -456,6 +456,9 @@ compute_localsplus_info(_PyCompile_CodeUnitMetadata *umd, int nlocalsplus, assert(offset < nlocalsplus); // For now we do not distinguish arg kinds. _PyLocals_Kind kind = CO_FAST_LOCAL; + if (PyDict_Contains(umd->u_fasthidden, k)) { + kind |= CO_FAST_HIDDEN; + } if (PyDict_GetItem(umd->u_cellvars, k) != NULL) { kind |= CO_FAST_CELL; } diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 8270fa8e372d93..3883ec9e21c765 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -317,7 +317,6 @@ simple_format_arg_parse(PyObject *fmt, Py_ssize_t *ppos, case ' ': *flags |= F_BLANK; continue; case '#': *flags |= F_ALT; continue; case '0': *flags |= F_ZERO; continue; - case 'z': *flags |= F_NO_NEG_0; continue; } break; } diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 8840bbabe4b584..ddddc03ca316e0 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -3014,9 +3014,16 @@ static PyMethodDef builtin_methods[] = { }; PyDoc_STRVAR(builtin_doc, -"Built-in functions, exceptions, and other objects.\n\ +"Built-in functions, types, exceptions, and other objects.\n\ \n\ -Noteworthy: None is the `nil' object; Ellipsis represents `...' in slices."); +This module provides direct access to all 'built-in'\n\ +identifiers of Python; for example, builtins.len is\n\ +the full name for the built-in function len().\n\ +\n\ +This module is not normally accessed explicitly by most\n\ +applications, but can be useful in modules that provide\n\ +objects with the same name as a built-in value, but in\n\ +which the built-in of that name is also needed."); static struct PyModuleDef builtinsmodule = { PyModuleDef_HEAD_INIT, diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 6914d8211ba4ef..7edd68b385d1dc 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -194,6 +194,12 @@ dummy_func( Py_INCREF(value); } + inst(LOAD_FAST_AND_CLEAR, (-- value)) { + value = GETLOCAL(oparg); + // do not use SETLOCAL here, it decrefs the old value + GETLOCAL(oparg) = NULL; + } + inst(LOAD_CONST, (-- value)) { value = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(value); diff --git a/Python/ceval.c b/Python/ceval.c index 958689debc87f8..56a3b123f46331 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -27,6 +27,7 @@ #include "pycore_dict.h" #include "dictobject.h" #include "pycore_frame.h" +#include "frameobject.h" // _PyInterpreterFrame_GetLine #include "opcode.h" #include "pydtrace.h" #include "setobject.h" @@ -785,7 +786,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _PyErr_Format(tstate, PyExc_SystemError, "%U:%d: unknown opcode %d", frame->f_code->co_filename, - _PyInterpreterFrame_GetLine(frame), + PyUnstable_InterpreterFrame_GetLine(frame), opcode); goto error; diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 29796be4b80e93..42e1436bc9130d 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -229,6 +229,9 @@ static void _gil_initialize(struct _gil_runtime_state *gil) static int gil_created(struct _gil_runtime_state *gil) { + if (gil == NULL) { + return 0; + } return (_Py_atomic_load_explicit(&gil->locked, _Py_memory_order_acquire) >= 0); } @@ -273,10 +276,9 @@ static void recreate_gil(struct _gil_runtime_state *gil) #endif static void -drop_gil(struct _ceval_runtime_state *ceval, struct _ceval_state *ceval2, - PyThreadState *tstate) +drop_gil(struct _ceval_state *ceval, PyThreadState 
*tstate) { - struct _gil_runtime_state *gil = &ceval->gil; + struct _gil_runtime_state *gil = ceval->gil; if (!_Py_atomic_load_relaxed(&gil->locked)) { Py_FatalError("drop_gil: GIL is not locked"); } @@ -296,7 +298,7 @@ drop_gil(struct _ceval_runtime_state *ceval, struct _ceval_state *ceval2, MUTEX_UNLOCK(gil->mutex); #ifdef FORCE_SWITCHING - if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request) && tstate != NULL) { + if (_Py_atomic_load_relaxed(&ceval->gil_drop_request) && tstate != NULL) { MUTEX_LOCK(gil->switch_mutex); /* Not switched yet => wait */ if (((PyThreadState*)_Py_atomic_load_relaxed(&gil->last_holder)) == tstate) @@ -358,9 +360,8 @@ take_gil(PyThreadState *tstate) assert(is_tstate_valid(tstate)); PyInterpreterState *interp = tstate->interp; - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; - struct _ceval_state *ceval2 = &interp->ceval; - struct _gil_runtime_state *gil = &ceval->gil; + struct _ceval_state *ceval = &interp->ceval; + struct _gil_runtime_state *gil = ceval->gil; /* Check that _PyEval_InitThreads() was called to create the lock */ assert(gil_created(gil)); @@ -434,12 +435,12 @@ take_gil(PyThreadState *tstate) in take_gil() while the main thread called wait_for_thread_shutdown() from Py_Finalize(). */ MUTEX_UNLOCK(gil->mutex); - drop_gil(ceval, ceval2, tstate); + drop_gil(ceval, tstate); PyThread_exit_thread(); } assert(is_tstate_valid(tstate)); - if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request)) { + if (_Py_atomic_load_relaxed(&ceval->gil_drop_request)) { RESET_GIL_DROP_REQUEST(interp); } else { @@ -448,7 +449,7 @@ take_gil(PyThreadState *tstate) handle signals. Note: RESET_GIL_DROP_REQUEST() calls COMPUTE_EVAL_BREAKER(). */ - COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); + COMPUTE_EVAL_BREAKER(interp, &_PyRuntime.ceval, ceval); } /* Don't access tstate if the thread must exit */ @@ -463,63 +464,112 @@ take_gil(PyThreadState *tstate) void _PyEval_SetSwitchInterval(unsigned long microseconds) { - struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; + PyInterpreterState *interp = _PyInterpreterState_Get(); + struct _gil_runtime_state *gil = interp->ceval.gil; + assert(gil != NULL); gil->interval = microseconds; } unsigned long _PyEval_GetSwitchInterval(void) { - struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; + PyInterpreterState *interp = _PyInterpreterState_Get(); + struct _gil_runtime_state *gil = interp->ceval.gil; + assert(gil != NULL); return gil->interval; } int -_PyEval_ThreadsInitialized(_PyRuntimeState *runtime) +_PyEval_ThreadsInitialized(void) { - return gil_created(&runtime->ceval.gil); + /* XXX This is only needed for an assert in PyGILState_Ensure(), + * which currently does not work with subinterpreters. + * Thus we only use the main interpreter. */ + PyInterpreterState *interp = _PyInterpreterState_Main(); + if (interp == NULL) { + return 0; + } + struct _gil_runtime_state *gil = interp->ceval.gil; + return gil_created(gil); } int PyEval_ThreadsInitialized(void) { - _PyRuntimeState *runtime = &_PyRuntime; - return _PyEval_ThreadsInitialized(runtime); + return _PyEval_ThreadsInitialized(); } -PyStatus -_PyEval_InitGIL(PyThreadState *tstate) +static inline int +current_thread_holds_gil(struct _gil_runtime_state *gil, PyThreadState *tstate) { - if (!_Py_IsMainInterpreter(tstate->interp)) { - /* Currently, the GIL is shared by all interpreters, - and only the main interpreter is responsible to create - and destroy it. 
*/ - return _PyStatus_OK(); + if (((PyThreadState*)_Py_atomic_load_relaxed(&gil->last_holder)) != tstate) { + return 0; } + return _Py_atomic_load_relaxed(&gil->locked); +} - struct _gil_runtime_state *gil = &tstate->interp->runtime->ceval.gil; - assert(!gil_created(gil)); +static void +init_shared_gil(PyInterpreterState *interp, struct _gil_runtime_state *gil) +{ + assert(gil_created(gil)); + interp->ceval.gil = gil; + interp->ceval.own_gil = 0; +} - PyThread_init_thread(); +static void +init_own_gil(PyInterpreterState *interp, struct _gil_runtime_state *gil) +{ + assert(!gil_created(gil)); create_gil(gil); + assert(gil_created(gil)); + interp->ceval.gil = gil; + interp->ceval.own_gil = 1; +} - take_gil(tstate); +PyStatus +_PyEval_InitGIL(PyThreadState *tstate, int own_gil) +{ + assert(tstate->interp->ceval.gil == NULL); + int locked; + if (!own_gil) { + /* The interpreter will share the main interpreter's instead. */ + PyInterpreterState *main_interp = _PyInterpreterState_Main(); + assert(tstate->interp != main_interp); + struct _gil_runtime_state *gil = main_interp->ceval.gil; + init_shared_gil(tstate->interp, gil); + locked = current_thread_holds_gil(gil, tstate); + } + else { + PyThread_init_thread(); + init_own_gil(tstate->interp, &tstate->interp->_gil); + locked = 0; + } + if (!locked) { + take_gil(tstate); + } - assert(gil_created(gil)); return _PyStatus_OK(); } void _PyEval_FiniGIL(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - /* Currently, the GIL is shared by all interpreters, - and only the main interpreter is responsible to create - and destroy it. */ + struct _gil_runtime_state *gil = interp->ceval.gil; + if (gil == NULL) { + /* It was already finalized (or hasn't been initialized yet). */ + assert(!interp->ceval.own_gil); + return; + } + else if (!interp->ceval.own_gil) { +#ifdef Py_DEBUG + PyInterpreterState *main_interp = _PyInterpreterState_Main(); + assert(main_interp != NULL && interp != main_interp); + assert(interp->ceval.gil == main_interp->ceval.gil); +#endif + interp->ceval.gil = NULL; return; } - struct _gil_runtime_state *gil = &interp->runtime->ceval.gil; if (!gil_created(gil)) { /* First Py_InitializeFromConfig() call: the GIL doesn't exist yet: do nothing. */ @@ -528,6 +578,7 @@ _PyEval_FiniGIL(PyInterpreterState *interp) destroy_gil(gil); assert(!gil_created(gil)); + interp->ceval.gil = NULL; } void @@ -546,8 +597,7 @@ _PyEval_Fini(void) void PyEval_AcquireLock(void) { - _PyRuntimeState *runtime = &_PyRuntime; - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + PyThreadState *tstate = _PyThreadState_GET(); _Py_EnsureTstateNotNULL(tstate); take_gil(tstate); @@ -556,22 +606,27 @@ PyEval_AcquireLock(void) void PyEval_ReleaseLock(void) { - _PyRuntimeState *runtime = &_PyRuntime; - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + PyThreadState *tstate = _PyThreadState_GET(); /* This function must succeed when the current thread state is NULL. We therefore avoid PyThreadState_Get() which dumps a fatal error in debug mode. 
*/ - struct _ceval_runtime_state *ceval = &runtime->ceval; - struct _ceval_state *ceval2 = &tstate->interp->ceval; - drop_gil(ceval, ceval2, tstate); + struct _ceval_state *ceval = &tstate->interp->ceval; + drop_gil(ceval, tstate); +} + +void +_PyEval_AcquireLock(PyThreadState *tstate) +{ + _Py_EnsureTstateNotNULL(tstate); + take_gil(tstate); } void _PyEval_ReleaseLock(PyThreadState *tstate) { - struct _ceval_runtime_state *ceval = &tstate->interp->runtime->ceval; - struct _ceval_state *ceval2 = &tstate->interp->ceval; - drop_gil(ceval, ceval2, tstate); + _Py_EnsureTstateNotNULL(tstate); + struct _ceval_state *ceval = &tstate->interp->ceval; + drop_gil(ceval, tstate); } void @@ -581,7 +636,7 @@ PyEval_AcquireThread(PyThreadState *tstate) take_gil(tstate); - if (_PyThreadState_Swap(tstate->interp->runtime, tstate) != NULL) { + if (_PyThreadState_SwapNoGIL(tstate) != NULL) { Py_FatalError("non-NULL old thread state"); } } @@ -591,14 +646,12 @@ PyEval_ReleaseThread(PyThreadState *tstate) { assert(is_tstate_valid(tstate)); - _PyRuntimeState *runtime = tstate->interp->runtime; - PyThreadState *new_tstate = _PyThreadState_Swap(runtime, NULL); + PyThreadState *new_tstate = _PyThreadState_SwapNoGIL(NULL); if (new_tstate != tstate) { Py_FatalError("wrong thread state"); } - struct _ceval_runtime_state *ceval = &runtime->ceval; - struct _ceval_state *ceval2 = &tstate->interp->ceval; - drop_gil(ceval, ceval2, tstate); + struct _ceval_state *ceval = &tstate->interp->ceval; + drop_gil(ceval, tstate); } #ifdef HAVE_FORK @@ -608,9 +661,9 @@ PyEval_ReleaseThread(PyThreadState *tstate) PyStatus _PyEval_ReInitThreads(PyThreadState *tstate) { - _PyRuntimeState *runtime = tstate->interp->runtime; + assert(tstate->interp == _PyInterpreterState_Main()); - struct _gil_runtime_state *gil = &runtime->ceval.gil; + struct _gil_runtime_state *gil = tstate->interp->ceval.gil; if (!gil_created(gil)) { return _PyStatus_OK(); } @@ -641,14 +694,12 @@ _PyEval_SignalAsyncExc(PyInterpreterState *interp) PyThreadState * PyEval_SaveThread(void) { - _PyRuntimeState *runtime = &_PyRuntime; - PyThreadState *tstate = _PyThreadState_Swap(runtime, NULL); + PyThreadState *tstate = _PyThreadState_SwapNoGIL(NULL); _Py_EnsureTstateNotNULL(tstate); - struct _ceval_runtime_state *ceval = &runtime->ceval; - struct _ceval_state *ceval2 = &tstate->interp->ceval; - assert(gil_created(&ceval->gil)); - drop_gil(ceval, ceval2, tstate); + struct _ceval_state *ceval = &tstate->interp->ceval; + assert(gil_created(ceval->gil)); + drop_gil(ceval, tstate); return tstate; } @@ -659,7 +710,7 @@ PyEval_RestoreThread(PyThreadState *tstate) take_gil(tstate); - _PyThreadState_Swap(tstate->interp->runtime, tstate); + _PyThreadState_SwapNoGIL(tstate); } @@ -902,20 +953,13 @@ Py_MakePendingCalls(void) return 0; } -/* The interpreter's recursion limit */ - void -_PyEval_InitRuntimeState(struct _ceval_runtime_state *ceval) +_PyEval_InitState(PyInterpreterState *interp, PyThread_type_lock pending_lock) { - _gil_initialize(&ceval->gil); -} + _gil_initialize(&interp->_gil); -void -_PyEval_InitState(struct _ceval_state *ceval, PyThread_type_lock pending_lock) -{ - struct _pending_calls *pending = &ceval->pending; + struct _pending_calls *pending = &interp->ceval.pending; assert(pending->lock == NULL); - pending->lock = pending_lock; } @@ -962,16 +1006,16 @@ _Py_HandlePending(PyThreadState *tstate) /* GIL drop request */ if (_Py_atomic_load_relaxed_int32(&interp_ceval_state->gil_drop_request)) { /* Give another thread a chance */ - if (_PyThreadState_Swap(runtime, 
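/* ----------------------------------------------------------------------
 * Editor's note: hedged sketch, not part of this patch.  The
 * _PyEval_InitGIL(tstate, own_gil) changes above give an interpreter its
 * own _gil_runtime_state instead of always sharing the runtime-wide one.
 * Assuming the public PyInterpreterConfig API that accompanies this work
 * elsewhere in 3.12 (Py_NewInterpreterFromConfig() and
 * PyInterpreterConfig_OWN_GIL are not shown in these hunks), requesting an
 * interpreter with its own GIL from an embedder might look like this.
 * ---------------------------------------------------------------------- */
#include <Python.h>

static int
spawn_isolated_interpreter(void)
{
    PyThreadState *saved = PyThreadState_Get();

    PyInterpreterConfig config = {
        /* An own GIL implies full isolation from the main interpreter. */
        .check_multi_interp_extensions = 1,
        .gil = PyInterpreterConfig_OWN_GIL,
    };
    PyThreadState *tstate = NULL;
    PyStatus status = Py_NewInterpreterFromConfig(&tstate, &config);
    if (PyStatus_Exception(status)) {
        PyThreadState_Swap(saved);
        return -1;
    }

    /* ... run code in the new interpreter via tstate ... */

    Py_EndInterpreter(tstate);        /* tstate must be current here */
    PyThreadState_Swap(saved);        /* restore the caller's thread state */
    return 0;
}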
NULL) != tstate) { + if (_PyThreadState_SwapNoGIL(NULL) != tstate) { Py_FatalError("tstate mix-up"); } - drop_gil(ceval, interp_ceval_state, tstate); + drop_gil(interp_ceval_state, tstate); /* Other threads may run now */ take_gil(tstate); - if (_PyThreadState_Swap(runtime, tstate) != NULL) { + if (_PyThreadState_SwapNoGIL(tstate) != NULL) { Py_FatalError("orphan tstate"); } } diff --git a/Python/compile.c b/Python/compile.c index 2075b1672b690c..1fdbf1b85b4295 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -308,7 +308,6 @@ instr_sequence_fini(instr_sequence *seq) { seq->s_instrs = NULL; } - static int instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) { memset(g, 0, sizeof(cfg_builder)); @@ -382,7 +381,6 @@ struct compiler_unit { int u_scope_type; - PyObject *u_private; /* for private name mangling */ instr_sequence u_instr_sequence; /* codegen output */ @@ -486,13 +484,15 @@ static int compiler_sync_comprehension_generator( struct compiler *c, location loc, asdl_comprehension_seq *generators, int gen_index, int depth, - expr_ty elt, expr_ty val, int type); + expr_ty elt, expr_ty val, int type, + int iter_on_stack); static int compiler_async_comprehension_generator( struct compiler *c, location loc, asdl_comprehension_seq *generators, int gen_index, int depth, - expr_ty elt, expr_ty val, int type); + expr_ty elt, expr_ty val, int type, + int iter_on_stack); static int compiler_pattern(struct compiler *, pattern_ty, pattern_context *); static int compiler_match(struct compiler *, stmt_ty); @@ -690,6 +690,7 @@ compiler_unit_free(struct compiler_unit *u) Py_CLEAR(u->u_metadata.u_varnames); Py_CLEAR(u->u_metadata.u_freevars); Py_CLEAR(u->u_metadata.u_cellvars); + Py_CLEAR(u->u_metadata.u_fasthidden); Py_CLEAR(u->u_private); PyObject_Free(u); } @@ -838,6 +839,8 @@ stack_effect(int opcode, int oparg, int jump) * if an exception be raised. */ return jump ? 1 : 0; + case STORE_FAST_MAYBE_NULL: + return -1; case LOAD_METHOD: return 1; case LOAD_SUPER_METHOD: @@ -1240,11 +1243,9 @@ compiler_enter_scope(struct compiler *c, identifier name, } if (u->u_ste->ste_needs_class_closure) { /* Cook up an implicit __class__ cell. 
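(Illustrative aside, not part of the patch: the "implicit __class__ cell" that compiler_enter_scope cooks up here is what backs zero-argument super() and direct __class__ references at the Python level. A minimal sketch, with a throwaway class name:)

    class Widget:
        def which(self):
            # Referencing __class__ (or calling zero-argument super()) is what
            # makes the class body need the implicit __class__ cell.
            return __class__

    assert Widget().which() is Widget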
*/ - int res; + Py_ssize_t res; assert(u->u_scope_type == COMPILER_SCOPE_CLASS); - assert(PyDict_GET_SIZE(u->u_metadata.u_cellvars) == 0); - res = PyDict_SetItem(u->u_metadata.u_cellvars, &_Py_ID(__class__), - _PyLong_GetZero()); + res = dict_add_o(u->u_metadata.u_cellvars, &_Py_ID(__class__)); if (res < 0) { compiler_unit_free(u); return ERROR; @@ -1258,6 +1259,12 @@ compiler_enter_scope(struct compiler *c, identifier name, return ERROR; } + u->u_metadata.u_fasthidden = PyDict_New(); + if (!u->u_metadata.u_fasthidden) { + compiler_unit_free(u); + return ERROR; + } + u->u_nfblocks = 0; u->u_metadata.u_firstlineno = lineno; u->u_metadata.u_consts = PyDict_New(); @@ -2236,7 +2243,6 @@ compiler_class(struct compiler *c, stmt_ty s) compiler_exit_scope(c); return ERROR; } - assert(i == 0); ADDOP_I(c, NO_LOCATION, LOAD_CLOSURE, i); ADDOP_I(c, NO_LOCATION, COPY, 1); if (compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store) < 0) { @@ -2246,7 +2252,6 @@ compiler_class(struct compiler *c, stmt_ty s) } else { /* No methods referenced __class__, so just return None */ - assert(PyDict_GET_SIZE(c->u->u_metadata.u_cellvars) == 0); ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None); } ADDOP_IN_SCOPE(c, NO_LOCATION, RETURN_VALUE); @@ -3719,7 +3724,8 @@ compiler_nameop(struct compiler *c, location loc, optype = OP_DEREF; break; case LOCAL: - if (c->u->u_ste->ste_type == FunctionBlock) + if (c->u->u_ste->ste_type == FunctionBlock || + (PyDict_GetItem(c->u->u_metadata.u_fasthidden, mangled) == Py_True)) optype = OP_FAST; break; case GLOBAL_IMPLICIT: @@ -4745,16 +4751,19 @@ static int compiler_comprehension_generator(struct compiler *c, location loc, asdl_comprehension_seq *generators, int gen_index, int depth, - expr_ty elt, expr_ty val, int type) + expr_ty elt, expr_ty val, int type, + int iter_on_stack) { comprehension_ty gen; gen = (comprehension_ty)asdl_seq_GET(generators, gen_index); if (gen->is_async) { return compiler_async_comprehension_generator( - c, loc, generators, gen_index, depth, elt, val, type); + c, loc, generators, gen_index, depth, elt, val, type, + iter_on_stack); } else { return compiler_sync_comprehension_generator( - c, loc, generators, gen_index, depth, elt, val, type); + c, loc, generators, gen_index, depth, elt, val, type, + iter_on_stack); } } @@ -4762,7 +4771,8 @@ static int compiler_sync_comprehension_generator(struct compiler *c, location loc, asdl_comprehension_seq *generators, int gen_index, int depth, - expr_ty elt, expr_ty val, int type) + expr_ty elt, expr_ty val, int type, + int iter_on_stack) { /* generate code for the iterator, then each of the ifs, and then write to the element */ @@ -4774,37 +4784,39 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc, comprehension_ty gen = (comprehension_ty)asdl_seq_GET(generators, gen_index); - if (gen_index == 0) { - /* Receive outermost iter as an implicit argument */ - c->u->u_metadata.u_argcount = 1; - ADDOP_I(c, loc, LOAD_FAST, 0); - } - else { - /* Sub-iter - calculate on the fly */ - /* Fast path for the temporary variable assignment idiom: - for y in [f(x)] - */ - asdl_expr_seq *elts; - switch (gen->iter->kind) { - case List_kind: - elts = gen->iter->v.List.elts; - break; - case Tuple_kind: - elts = gen->iter->v.Tuple.elts; - break; - default: - elts = NULL; - } - if (asdl_seq_LEN(elts) == 1) { - expr_ty elt = asdl_seq_GET(elts, 0); - if (elt->kind != Starred_kind) { - VISIT(c, expr, elt); - start = NO_LABEL; - } + if (!iter_on_stack) { + if (gen_index == 0) { + /* Receive outermost iter as an implicit 
argument */ + c->u->u_metadata.u_argcount = 1; + ADDOP_I(c, loc, LOAD_FAST, 0); } - if (IS_LABEL(start)) { - VISIT(c, expr, gen->iter); - ADDOP(c, loc, GET_ITER); + else { + /* Sub-iter - calculate on the fly */ + /* Fast path for the temporary variable assignment idiom: + for y in [f(x)] + */ + asdl_expr_seq *elts; + switch (gen->iter->kind) { + case List_kind: + elts = gen->iter->v.List.elts; + break; + case Tuple_kind: + elts = gen->iter->v.Tuple.elts; + break; + default: + elts = NULL; + } + if (asdl_seq_LEN(elts) == 1) { + expr_ty elt = asdl_seq_GET(elts, 0); + if (elt->kind != Starred_kind) { + VISIT(c, expr, elt); + start = NO_LABEL; + } + } + if (IS_LABEL(start)) { + VISIT(c, expr, gen->iter); + ADDOP(c, loc, GET_ITER); + } } } if (IS_LABEL(start)) { @@ -4825,7 +4837,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc, RETURN_IF_ERROR( compiler_comprehension_generator(c, loc, generators, gen_index, depth, - elt, val, type)); + elt, val, type, 0)); } location elt_loc = LOC(elt); @@ -4878,7 +4890,8 @@ static int compiler_async_comprehension_generator(struct compiler *c, location loc, asdl_comprehension_seq *generators, int gen_index, int depth, - expr_ty elt, expr_ty val, int type) + expr_ty elt, expr_ty val, int type, + int iter_on_stack) { NEW_JUMP_TARGET_LABEL(c, start); NEW_JUMP_TARGET_LABEL(c, except); @@ -4887,15 +4900,17 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, comprehension_ty gen = (comprehension_ty)asdl_seq_GET(generators, gen_index); - if (gen_index == 0) { - /* Receive outermost iter as an implicit argument */ - c->u->u_metadata.u_argcount = 1; - ADDOP_I(c, loc, LOAD_FAST, 0); - } - else { - /* Sub-iter - calculate on the fly */ - VISIT(c, expr, gen->iter); - ADDOP(c, loc, GET_AITER); + if (!iter_on_stack) { + if (gen_index == 0) { + /* Receive outermost iter as an implicit argument */ + c->u->u_metadata.u_argcount = 1; + ADDOP_I(c, loc, LOAD_FAST, 0); + } + else { + /* Sub-iter - calculate on the fly */ + VISIT(c, expr, gen->iter); + ADDOP(c, loc, GET_AITER); + } } USE_LABEL(c, start); @@ -4922,7 +4937,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, RETURN_IF_ERROR( compiler_comprehension_generator(c, loc, generators, gen_index, depth, - elt, val, type)); + elt, val, type, 0)); } location elt_loc = LOC(elt); @@ -4971,26 +4986,212 @@ compiler_async_comprehension_generator(struct compiler *c, location loc, return SUCCESS; } +typedef struct { + PyObject *pushed_locals; + PyObject *temp_symbols; + PyObject *fast_hidden; +} inlined_comprehension_state; + +static int +push_inlined_comprehension_state(struct compiler *c, location loc, + PySTEntryObject *entry, + inlined_comprehension_state *state) +{ + // iterate over names bound in the comprehension and ensure we isolate + // them from the outer scope as needed + PyObject *k, *v; + Py_ssize_t pos = 0; + while (PyDict_Next(entry->ste_symbols, &pos, &k, &v)) { + assert(PyLong_Check(v)); + long symbol = PyLong_AS_LONG(v); + // only values bound in the comprehension (DEF_LOCAL) need to be handled + // at all; DEF_LOCAL | DEF_NONLOCAL can occur in the case of an + // assignment expression to a nonlocal in the comprehension, these don't + // need handling here since they shouldn't be isolated + if (symbol & DEF_LOCAL && !(symbol & DEF_NONLOCAL)) { + if (c->u->u_ste->ste_type != FunctionBlock) { + // non-function scope: override this name to use fast locals + PyObject *orig = PyDict_GetItem(c->u->u_metadata.u_fasthidden, k); + if (orig != 
Py_True) { + if (PyDict_SetItem(c->u->u_metadata.u_fasthidden, k, Py_True) < 0) { + return ERROR; + } + if (state->fast_hidden == NULL) { + state->fast_hidden = PySet_New(NULL); + if (state->fast_hidden == NULL) { + return ERROR; + } + } + if (PySet_Add(state->fast_hidden, k) < 0) { + return ERROR; + } + } + } + long scope = (symbol >> SCOPE_OFFSET) & SCOPE_MASK; + PyObject *outv = PyDict_GetItemWithError(c->u->u_ste->ste_symbols, k); + if (outv == NULL) { + return ERROR; + } + assert(PyLong_Check(outv)); + long outsc = (PyLong_AS_LONG(outv) >> SCOPE_OFFSET) & SCOPE_MASK; + if (scope != outsc) { + // If a name has different scope inside than outside the + // comprehension, we need to temporarily handle it with the + // right scope while compiling the comprehension. + if (state->temp_symbols == NULL) { + state->temp_symbols = PyDict_New(); + if (state->temp_symbols == NULL) { + return ERROR; + } + } + // update the symbol to the in-comprehension version and save + // the outer version; we'll restore it after running the + // comprehension + Py_INCREF(outv); + if (PyDict_SetItem(c->u->u_ste->ste_symbols, k, v) < 0) { + Py_DECREF(outv); + return ERROR; + } + if (PyDict_SetItem(state->temp_symbols, k, outv) < 0) { + Py_DECREF(outv); + return ERROR; + } + Py_DECREF(outv); + } + if (outsc == LOCAL || outsc == CELL || outsc == FREE) { + // local names bound in comprehension must be isolated from + // outer scope; push existing value (which may be NULL if + // not defined) on stack + if (state->pushed_locals == NULL) { + state->pushed_locals = PyList_New(0); + if (state->pushed_locals == NULL) { + return ERROR; + } + } + // in the case of a cell, this will actually push the cell + // itself to the stack, then we'll create a new one for the + // comprehension and restore the original one after + ADDOP_NAME(c, loc, LOAD_FAST_AND_CLEAR, k, varnames); + if (scope == CELL) { + ADDOP_NAME(c, loc, MAKE_CELL, k, cellvars); + } + if (PyList_Append(state->pushed_locals, k) < 0) { + return ERROR; + } + } + } + } + if (state->pushed_locals) { + // Outermost iterable expression was already evaluated and is on the + // stack, we need to swap it back to TOS. This also rotates the order of + // `pushed_locals` on the stack, but this will be reversed when we swap + // out the comprehension result in pop_inlined_comprehension_state + ADDOP_I(c, loc, SWAP, PyList_GET_SIZE(state->pushed_locals) + 1); + } + + return SUCCESS; +} + +static int +pop_inlined_comprehension_state(struct compiler *c, location loc, + inlined_comprehension_state state) +{ + PyObject *k, *v; + Py_ssize_t pos = 0; + if (state.temp_symbols) { + while (PyDict_Next(state.temp_symbols, &pos, &k, &v)) { + if (PyDict_SetItem(c->u->u_ste->ste_symbols, k, v)) { + return ERROR; + } + } + Py_CLEAR(state.temp_symbols); + } + if (state.pushed_locals) { + // pop names we pushed to stack earlier + Py_ssize_t npops = PyList_GET_SIZE(state.pushed_locals); + // Preserve the list/dict/set result of the comprehension as TOS. 
This + // reverses the SWAP we did in push_inlined_comprehension_state to get + // the outermost iterable to TOS, so we can still just iterate + // pushed_locals in simple reverse order + ADDOP_I(c, loc, SWAP, npops + 1); + for (Py_ssize_t i = npops - 1; i >= 0; --i) { + k = PyList_GetItem(state.pushed_locals, i); + if (k == NULL) { + return ERROR; + } + ADDOP_NAME(c, loc, STORE_FAST_MAYBE_NULL, k, varnames); + } + Py_CLEAR(state.pushed_locals); + } + if (state.fast_hidden) { + while (PySet_Size(state.fast_hidden) > 0) { + PyObject *k = PySet_Pop(state.fast_hidden); + if (k == NULL) { + return ERROR; + } + // we set to False instead of clearing, so we can track which names + // were temporarily fast-locals and should use CO_FAST_HIDDEN + if (PyDict_SetItem(c->u->u_metadata.u_fasthidden, k, Py_False)) { + Py_DECREF(k); + return ERROR; + } + Py_DECREF(k); + } + Py_CLEAR(state.fast_hidden); + } + return SUCCESS; +} + +static inline int +compiler_comprehension_iter(struct compiler *c, location loc, + comprehension_ty comp) +{ + VISIT(c, expr, comp->iter); + if (comp->is_async) { + ADDOP(c, loc, GET_AITER); + } + else { + ADDOP(c, loc, GET_ITER); + } + return SUCCESS; +} + static int compiler_comprehension(struct compiler *c, expr_ty e, int type, identifier name, asdl_comprehension_seq *generators, expr_ty elt, expr_ty val) { PyCodeObject *co = NULL; + inlined_comprehension_state inline_state = {NULL, NULL}; comprehension_ty outermost; int scope_type = c->u->u_scope_type; - int is_async_generator = 0; int is_top_level_await = IS_TOP_LEVEL_AWAIT(c); - - outermost = (comprehension_ty) asdl_seq_GET(generators, 0); - if (compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION, - (void *)e, e->lineno) < 0) - { + PySTEntryObject *entry = PySymtable_Lookup(c->c_st, (void *)e); + if (entry == NULL) { goto error; } + int is_inlined = entry->ste_comp_inlined; + int is_async_generator = entry->ste_coroutine; + location loc = LOC(e); - is_async_generator = c->u->u_ste->ste_coroutine; + outermost = (comprehension_ty) asdl_seq_GET(generators, 0); + if (is_inlined) { + if (compiler_comprehension_iter(c, loc, outermost)) { + goto error; + } + if (push_inlined_comprehension_state(c, loc, entry, &inline_state)) { + goto error; + } + } + else { + if (compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION, + (void *)e, e->lineno) < 0) + { + goto error; + } + } + Py_CLEAR(entry); if (is_async_generator && type != COMP_GENEXP && scope_type != COMPILER_SCOPE_ASYNC_FUNCTION && @@ -5021,13 +5222,23 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, } ADDOP_I(c, loc, op, 0); + if (is_inlined) { + ADDOP_I(c, loc, SWAP, 2); + } } if (compiler_comprehension_generator(c, loc, generators, 0, 0, - elt, val, type) < 0) { + elt, val, type, is_inlined) < 0) { goto error_in_scope; } + if (is_inlined) { + if (pop_inlined_comprehension_state(c, loc, inline_state)) { + goto error; + } + return SUCCESS; + } + if (type != COMP_GENEXP) { ADDOP(c, LOC(e), RETURN_VALUE); } @@ -5050,15 +5261,10 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, if (compiler_make_closure(c, loc, co, 0) < 0) { goto error; } - Py_DECREF(co); + Py_CLEAR(co); - VISIT(c, expr, outermost->iter); - - loc = LOC(e); - if (outermost->is_async) { - ADDOP(c, loc, GET_AITER); - } else { - ADDOP(c, loc, GET_ITER); + if (compiler_comprehension_iter(c, loc, outermost)) { + goto error; } ADDOP_I(c, loc, CALL, 0); @@ -5071,9 +5277,15 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, return SUCCESS; error_in_scope: - 
compiler_exit_scope(c); + if (!is_inlined) { + compiler_exit_scope(c); + } error: Py_XDECREF(co); + Py_XDECREF(entry); + Py_XDECREF(inline_state.pushed_locals); + Py_XDECREF(inline_state.temp_symbols); + Py_XDECREF(inline_state.fast_hidden); return ERROR; } @@ -6758,11 +6970,11 @@ _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj) static int * -build_cellfixedoffsets(struct compiler_unit *u) +build_cellfixedoffsets(_PyCompile_CodeUnitMetadata *umd) { - int nlocals = (int)PyDict_GET_SIZE(u->u_metadata.u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(u->u_metadata.u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(u->u_metadata.u_freevars); + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); int noffsets = ncellvars + nfreevars; int *fixed = PyMem_New(int, noffsets); @@ -6776,8 +6988,8 @@ build_cellfixedoffsets(struct compiler_unit *u) PyObject *varname, *cellindex; Py_ssize_t pos = 0; - while (PyDict_Next(u->u_metadata.u_cellvars, &pos, &varname, &cellindex)) { - PyObject *varindex = PyDict_GetItem(u->u_metadata.u_varnames, varname); + while (PyDict_Next(umd->u_cellvars, &pos, &varname, &cellindex)) { + PyObject *varindex = PyDict_GetItem(umd->u_varnames, varname); if (varindex != NULL) { assert(PyLong_AS_LONG(cellindex) < INT_MAX); assert(PyLong_AS_LONG(varindex) < INT_MAX); @@ -6791,17 +7003,17 @@ build_cellfixedoffsets(struct compiler_unit *u) } static int -insert_prefix_instructions(struct compiler_unit *u, basicblock *entryblock, +insert_prefix_instructions(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, int *fixed, int nfreevars, int code_flags) { - assert(u->u_metadata.u_firstlineno > 0); + assert(umd->u_firstlineno > 0); /* Add the generator prefix instructions. */ if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) { cfg_instr make_gen = { .i_opcode = RETURN_GENERATOR, .i_oparg = 0, - .i_loc = LOCATION(u->u_metadata.u_firstlineno, u->u_metadata.u_firstlineno, -1, -1), + .i_loc = LOCATION(umd->u_firstlineno, umd->u_firstlineno, -1, -1), .i_target = NULL, }; RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 0, &make_gen)); @@ -6815,12 +7027,12 @@ insert_prefix_instructions(struct compiler_unit *u, basicblock *entryblock, } /* Set up cells for any variable that escapes, to be put in a closure. */ - const int ncellvars = (int)PyDict_GET_SIZE(u->u_metadata.u_cellvars); + const int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); if (ncellvars) { - // u->u_metadata.u_cellvars has the cells out of order so we sort them + // umd->u_cellvars has the cells out of order so we sort them // before adding the MAKE_CELL instructions. Note that we // adjust for arg cells, which come first. 
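As an aside (illustrative, not part of the diff): the inlined-comprehension machinery above -- push_inlined_comprehension_state, LOAD_FAST_AND_CLEAR, STORE_FAST_MAYBE_NULL and the is_inlined path in compiler_comprehension -- is meant to leave observable Python semantics unchanged: the iteration variable stays isolated, a clashing outer local is restored afterwards, and a walrus target still binds in the enclosing scope (the DEF_LOCAL | DEF_NONLOCAL case noted in the comments above). A sketch of the behavior being preserved:

    def demo():
        x = "outer"
        total = 0
        squares = [x * x for x in range(5)]                  # comprehension's x is isolated
        running = [(total := total + n) for n in range(5)]   # walrus still writes demo()'s total
        return x, total, squares

    # expected: ("outer", 10, [0, 1, 4, 9, 16]) -- the same result whether or not
    # the comprehension is inlined into demo()'s frame
    print(demo())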
- const int nvars = ncellvars + (int)PyDict_GET_SIZE(u->u_metadata.u_varnames); + const int nvars = ncellvars + (int)PyDict_GET_SIZE(umd->u_varnames); int *sorted = PyMem_RawCalloc(nvars, sizeof(int)); if (sorted == NULL) { PyErr_NoMemory(); @@ -6864,11 +7076,11 @@ insert_prefix_instructions(struct compiler_unit *u, basicblock *entryblock, } static int -fix_cell_offsets(struct compiler_unit *u, basicblock *entryblock, int *fixedmap) +fix_cell_offsets(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, int *fixedmap) { - int nlocals = (int)PyDict_GET_SIZE(u->u_metadata.u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(u->u_metadata.u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(u->u_metadata.u_freevars); + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); int noffsets = ncellvars + nfreevars; // First deal with duplicates (arg cells). @@ -6910,30 +7122,30 @@ fix_cell_offsets(struct compiler_unit *u, basicblock *entryblock, int *fixedmap) static int -prepare_localsplus(struct compiler_unit* u, cfg_builder *g, int code_flags) +prepare_localsplus(_PyCompile_CodeUnitMetadata *umd, cfg_builder *g, int code_flags) { - assert(PyDict_GET_SIZE(u->u_metadata.u_varnames) < INT_MAX); - assert(PyDict_GET_SIZE(u->u_metadata.u_cellvars) < INT_MAX); - assert(PyDict_GET_SIZE(u->u_metadata.u_freevars) < INT_MAX); - int nlocals = (int)PyDict_GET_SIZE(u->u_metadata.u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(u->u_metadata.u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(u->u_metadata.u_freevars); + assert(PyDict_GET_SIZE(umd->u_varnames) < INT_MAX); + assert(PyDict_GET_SIZE(umd->u_cellvars) < INT_MAX); + assert(PyDict_GET_SIZE(umd->u_freevars) < INT_MAX); + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); assert(INT_MAX - nlocals - ncellvars > 0); assert(INT_MAX - nlocals - ncellvars - nfreevars > 0); int nlocalsplus = nlocals + ncellvars + nfreevars; - int* cellfixedoffsets = build_cellfixedoffsets(u); + int* cellfixedoffsets = build_cellfixedoffsets(umd); if (cellfixedoffsets == NULL) { return ERROR; } // This must be called before fix_cell_offsets(). - if (insert_prefix_instructions(u, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) { + if (insert_prefix_instructions(umd, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) { PyMem_Free(cellfixedoffsets); return ERROR; } - int numdropped = fix_cell_offsets(u, g->g_entryblock, cellfixedoffsets); + int numdropped = fix_cell_offsets(umd, g->g_entryblock, cellfixedoffsets); PyMem_Free(cellfixedoffsets); // At this point we're done with it. 
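For context (illustrative, not part of the patch): prepare_localsplus sizes the frame as nlocals + ncellvars + nfreevars, which corresponds to the co_varnames / co_cellvars / co_freevars split visible from Python. A small sketch with throwaway names:

    def outer():
        a = 1
        def inner(b):
            c = b + a      # 'a' is read through a closure cell
            return c
        return inner

    f = outer()
    # outer.__code__.co_cellvars == ('a',)   -> gets a MAKE_CELL in the prologue
    # f.__code__.co_varnames     == ('b', 'c')
    # f.__code__.co_freevars     == ('a',)
    # so inner's "localsplus" region holds 2 + 0 + 1 == 3 slots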
cellfixedoffsets = NULL; if (numdropped < 0) { @@ -6984,7 +7196,7 @@ optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache, } /** Assembly **/ - int nlocalsplus = prepare_localsplus(u, &g, code_flags); + int nlocalsplus = prepare_localsplus(&u->u_metadata, &g, code_flags); if (nlocalsplus < 0) { goto error; } @@ -6994,7 +7206,7 @@ optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache, goto error; } - _PyCfg_ConvertExceptionHandlersToNops(g.g_entryblock); + _PyCfg_ConvertPseudoOps(g.g_entryblock); /* Order of basic blocks must have been determined by now */ @@ -7161,11 +7373,6 @@ instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq) goto error; } } - if (seq->s_used && !IS_TERMINATOR_OPCODE(seq->s_instrs[seq->s_used-1].i_opcode)) { - if (instr_sequence_addop(seq, RETURN_VALUE, 0, NO_LOCATION) < 0) { - goto error; - } - } PyMem_Free(is_target); return SUCCESS; error: @@ -7268,9 +7475,10 @@ cfg_to_instructions(cfg_builder *g) PyObject * _PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags, - int optimize) + int optimize, int compile_mode) { PyObject *res = NULL; + PyObject *metadata = NULL; if (!PyAST_Check(ast)) { PyErr_SetString(PyExc_TypeError, "expected an AST"); @@ -7282,7 +7490,7 @@ _PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags, return NULL; } - mod_ty mod = PyAST_obj2mod(ast, arena, 0 /* exec */); + mod_ty mod = PyAST_obj2mod(ast, arena, compile_mode); if (mod == NULL || !_PyAST_Validate(mod)) { _PyArena_Free(arena); return NULL; @@ -7298,9 +7506,52 @@ _PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags, goto finally; } - res = instr_sequence_to_instructions(INSTR_SEQUENCE(c)); + _PyCompile_CodeUnitMetadata *umd = &c->u->u_metadata; + metadata = PyDict_New(); + if (metadata == NULL) { + goto finally; + } +#define SET_MATADATA_ITEM(key, value) \ + if (value != NULL) { \ + if (PyDict_SetItemString(metadata, key, value) < 0) goto finally; \ + } + + SET_MATADATA_ITEM("name", umd->u_name); + SET_MATADATA_ITEM("qualname", umd->u_qualname); + SET_MATADATA_ITEM("consts", umd->u_consts); + SET_MATADATA_ITEM("names", umd->u_names); + SET_MATADATA_ITEM("varnames", umd->u_varnames); + SET_MATADATA_ITEM("cellvars", umd->u_cellvars); + SET_MATADATA_ITEM("freevars", umd->u_freevars); +#undef SET_MATADATA_ITEM + +#define SET_MATADATA_INT(key, value) do { \ + PyObject *v = PyLong_FromLong((long)value); \ + if (v == NULL) goto finally; \ + int res = PyDict_SetItemString(metadata, key, v); \ + Py_XDECREF(v); \ + if (res < 0) goto finally; \ + } while (0); + + SET_MATADATA_INT("argcount", umd->u_argcount); + SET_MATADATA_INT("posonlyargcount", umd->u_posonlyargcount); + SET_MATADATA_INT("kwonlyargcount", umd->u_kwonlyargcount); +#undef SET_MATADATA_INT + + int addNone = mod->kind != Expression_kind; + if (add_return_at_end(c, addNone) < 0) { + goto finally; + } + + PyObject *insts = instr_sequence_to_instructions(INSTR_SEQUENCE(c)); + if (insts == NULL) { + goto finally; + } + res = PyTuple_Pack(2, insts, metadata); + Py_DECREF(insts); finally: + Py_XDECREF(metadata); compiler_exit_scope(c); compiler_free(c); _PyArena_Free(arena); @@ -7332,6 +7583,71 @@ _PyCompile_OptimizeCfg(PyObject *instructions, PyObject *consts) return res; } +int _PyCfg_JumpLabelsToTargets(basicblock *entryblock); + +PyCodeObject * +_PyCompile_Assemble(_PyCompile_CodeUnitMetadata *umd, PyObject *filename, + PyObject *instructions) +{ + PyCodeObject *co = NULL; + instr_sequence 
optimized_instrs; + memset(&optimized_instrs, 0, sizeof(instr_sequence)); + + PyObject *const_cache = PyDict_New(); + if (const_cache == NULL) { + return NULL; + } + + cfg_builder g; + if (instructions_to_cfg(instructions, &g) < 0) { + goto error; + } + + if (_PyCfg_JumpLabelsToTargets(g.g_entryblock) < 0) { + goto error; + } + + int code_flags = 0; + int nlocalsplus = prepare_localsplus(umd, &g, code_flags); + if (nlocalsplus < 0) { + goto error; + } + + int maxdepth = _PyCfg_Stackdepth(g.g_entryblock, code_flags); + if (maxdepth < 0) { + goto error; + } + + _PyCfg_ConvertPseudoOps(g.g_entryblock); + + /* Order of basic blocks must have been determined by now */ + + if (_PyCfg_ResolveJumps(&g) < 0) { + goto error; + } + + /* Can't modify the bytecode after computing jump offsets. */ + + if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) { + goto error; + } + + PyObject *consts = consts_dict_keys_inorder(umd->u_consts); + if (consts == NULL) { + goto error; + } + co = _PyAssemble_MakeCodeObject(umd, const_cache, + consts, maxdepth, &optimized_instrs, + nlocalsplus, code_flags, filename); + Py_DECREF(consts); + +error: + Py_DECREF(const_cache); + _PyCfgBuilder_Fini(&g); + instr_sequence_fini(&optimized_instrs); + return co; +} + /* Retained for API compatibility. * Optimization is now done in _PyCfg_OptimizeCodeUnit */ diff --git a/Python/context.c b/Python/context.c index 5d385508405ede..1ffae9871be7b3 100644 --- a/Python/context.c +++ b/Python/context.c @@ -1309,7 +1309,7 @@ _PyContext_Init(PyInterpreterState *interp) PyObject *missing = get_token_missing(); if (PyDict_SetItemString( - PyContextToken_Type.tp_dict, "MISSING", missing)) + _PyType_GetDict(&PyContextToken_Type), "MISSING", missing)) { Py_DECREF(missing); return _PyStatus_ERR("can't init context types"); diff --git a/Python/errors.c b/Python/errors.c index 7fc267385c569b..a8000ac94918db 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -6,7 +6,7 @@ #include "pycore_initconfig.h" // _PyStatus_ERR() #include "pycore_pyerrors.h" // _PyErr_Format() #include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_structseq.h" // _PyStructSequence_FiniType() +#include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin() #include "pycore_sysmodule.h" // _PySys_Audit() #include "pycore_traceback.h" // _PyTraceBack_FromFrame() @@ -1342,8 +1342,9 @@ static PyStructSequence_Desc UnraisableHookArgs_desc = { PyStatus _PyErr_InitTypes(PyInterpreterState *interp) { - if (_PyStructSequence_InitBuiltin(&UnraisableHookArgsType, - &UnraisableHookArgs_desc) < 0) { + if (_PyStructSequence_InitBuiltin(interp, &UnraisableHookArgsType, + &UnraisableHookArgs_desc) < 0) + { return _PyStatus_ERR("failed to initialize UnraisableHookArgs type"); } return _PyStatus_OK(); @@ -1353,11 +1354,7 @@ _PyErr_InitTypes(PyInterpreterState *interp) void _PyErr_FiniTypes(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - - _PyStructSequence_FiniType(&UnraisableHookArgsType); + _PyStructSequence_FiniBuiltin(interp, &UnraisableHookArgsType); } diff --git a/Python/flowgraph.c b/Python/flowgraph.c index 6f83a910cab392..7f790b79d2844f 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -223,6 +223,15 @@ dump_basicblock(const basicblock *b) } } } + +void +_PyCfgBuilder_DumpGraph(const basicblock *entryblock) +{ + for (const basicblock *b = entryblock; b != NULL; b = b->b_next) { + dump_basicblock(b); + } +} + #endif @@ -592,6 +601,11 @@ translate_jump_labels_to_targets(basicblock *entryblock) return SUCCESS; } +int 
+_PyCfg_JumpLabelsToTargets(basicblock *entryblock) +{ + return translate_jump_labels_to_targets(entryblock); +} static int mark_except_handlers(basicblock *entryblock) { @@ -1275,7 +1289,9 @@ swaptimize(basicblock *block, int *ix) // - can't invoke arbitrary code (besides finalizers) // - only touch the TOS (and pop it when finished) #define SWAPPABLE(opcode) \ - ((opcode) == STORE_FAST || (opcode) == POP_TOP) + ((opcode) == STORE_FAST || \ + (opcode) == STORE_FAST_MAYBE_NULL || \ + (opcode) == POP_TOP) static int next_swappable_instruction(basicblock *block, int i, int lineno) @@ -1586,6 +1602,8 @@ scan_block_for_locals(basicblock *b, basicblock ***sp) uint64_t bit = (uint64_t)1 << instr->i_oparg; switch (instr->i_opcode) { case DELETE_FAST: + case LOAD_FAST_AND_CLEAR: + case STORE_FAST_MAYBE_NULL: unsafe_mask |= bit; break; case STORE_FAST: @@ -1625,7 +1643,8 @@ fast_scan_many_locals(basicblock *entryblock, int nlocals) Py_ssize_t blocknum = 0; // state[i - 64] == blocknum if local i is guaranteed to // be initialized, i.e., if it has had a previous LOAD_FAST or - // STORE_FAST within that basicblock (not followed by DELETE_FAST). + // STORE_FAST within that basicblock (not followed by + // DELETE_FAST/LOAD_FAST_AND_CLEAR/STORE_FAST_MAYBE_NULL). for (basicblock *b = entryblock; b != NULL; b = b->b_next) { blocknum++; for (int i = 0; i < b->b_iused; i++) { @@ -1639,6 +1658,8 @@ fast_scan_many_locals(basicblock *entryblock, int nlocals) assert(arg >= 0); switch (instr->i_opcode) { case DELETE_FAST: + case LOAD_FAST_AND_CLEAR: + case STORE_FAST_MAYBE_NULL: states[arg - 64] = blocknum - 1; break; case STORE_FAST: @@ -1961,7 +1982,7 @@ push_cold_blocks_to_end(cfg_builder *g, int code_flags) { } void -_PyCfg_ConvertExceptionHandlersToNops(basicblock *entryblock) +_PyCfg_ConvertPseudoOps(basicblock *entryblock) { for (basicblock *b = entryblock; b != NULL; b = b->b_next) { for (int i = 0; i < b->b_iused; i++) { @@ -1969,6 +1990,9 @@ _PyCfg_ConvertExceptionHandlersToNops(basicblock *entryblock) if (is_block_push(instr) || instr->i_opcode == POP_BLOCK) { INSTR_SET_OP0(instr, NOP); } + else if (instr->i_opcode == STORE_FAST_MAYBE_NULL) { + instr->i_opcode = STORE_FAST; + } } } for (basicblock *b = entryblock; b != NULL; b = b->b_next) { diff --git a/Python/frame.c b/Python/frame.c index c2c0be30113912..d792b92fa57560 100644 --- a/Python/frame.c +++ b/Python/frame.c @@ -144,8 +144,24 @@ _PyFrame_ClearExceptCode(_PyInterpreterFrame *frame) Py_DECREF(frame->f_funcobj); } +/* Unstable API functions */ + +PyCodeObject * +PyUnstable_InterpreterFrame_GetCode(struct _PyInterpreterFrame *frame) +{ + PyCodeObject *code = frame->f_code; + Py_INCREF(code); + return code; +} + +int +PyUnstable_InterpreterFrame_GetLasti(struct _PyInterpreterFrame *frame) +{ + return _PyInterpreterFrame_LASTI(frame) * sizeof(_Py_CODEUNIT); +} + int -_PyInterpreterFrame_GetLine(_PyInterpreterFrame *frame) +PyUnstable_InterpreterFrame_GetLine(_PyInterpreterFrame *frame) { int addr = _PyInterpreterFrame_LASTI(frame) * sizeof(_Py_CODEUNIT); return PyCode_Addr2Line(frame->f_code, addr); diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 9c6439ae930706..b40cb1f8272f43 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -92,13 +92,25 @@ DISPATCH(); } + TARGET(LOAD_FAST_AND_CLEAR) { + PyObject *value; + #line 198 "Python/bytecodes.c" + value = GETLOCAL(oparg); + // do not use SETLOCAL here, it decrefs the old value + GETLOCAL(oparg) = NULL; + #line 102 "Python/generated_cases.c.h" + 
STACK_GROW(1); + stack_pointer[-1] = value; + DISPATCH(); + } + TARGET(LOAD_CONST) { PREDICTED(LOAD_CONST); PyObject *value; - #line 198 "Python/bytecodes.c" + #line 204 "Python/bytecodes.c" value = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(value); - #line 102 "Python/generated_cases.c.h" + #line 114 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -106,9 +118,9 @@ TARGET(STORE_FAST) { PyObject *value = stack_pointer[-1]; - #line 203 "Python/bytecodes.c" + #line 209 "Python/bytecodes.c" SETLOCAL(oparg, value); - #line 112 "Python/generated_cases.c.h" + #line 124 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } @@ -122,7 +134,7 @@ value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 126 "Python/generated_cases.c.h" + #line 138 "Python/generated_cases.c.h" _tmp_2 = value; } oparg = (next_instr++)->op.arg; @@ -132,7 +144,7 @@ value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 136 "Python/generated_cases.c.h" + #line 148 "Python/generated_cases.c.h" _tmp_1 = value; } STACK_GROW(2); @@ -150,16 +162,16 @@ value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 154 "Python/generated_cases.c.h" + #line 166 "Python/generated_cases.c.h" _tmp_2 = value; } oparg = (next_instr++)->op.arg; { PyObject *value; - #line 198 "Python/bytecodes.c" + #line 204 "Python/bytecodes.c" value = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(value); - #line 163 "Python/generated_cases.c.h" + #line 175 "Python/generated_cases.c.h" _tmp_1 = value; } STACK_GROW(2); @@ -172,9 +184,9 @@ PyObject *_tmp_1 = stack_pointer[-1]; { PyObject *value = _tmp_1; - #line 203 "Python/bytecodes.c" + #line 209 "Python/bytecodes.c" SETLOCAL(oparg, value); - #line 178 "Python/generated_cases.c.h" + #line 190 "Python/generated_cases.c.h" } oparg = (next_instr++)->op.arg; { @@ -183,7 +195,7 @@ value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 187 "Python/generated_cases.c.h" + #line 199 "Python/generated_cases.c.h" _tmp_1 = value; } stack_pointer[-1] = _tmp_1; @@ -195,16 +207,16 @@ PyObject *_tmp_2 = stack_pointer[-2]; { PyObject *value = _tmp_1; - #line 203 "Python/bytecodes.c" + #line 209 "Python/bytecodes.c" SETLOCAL(oparg, value); - #line 201 "Python/generated_cases.c.h" + #line 213 "Python/generated_cases.c.h" } oparg = (next_instr++)->op.arg; { PyObject *value = _tmp_2; - #line 203 "Python/bytecodes.c" + #line 209 "Python/bytecodes.c" SETLOCAL(oparg, value); - #line 208 "Python/generated_cases.c.h" + #line 220 "Python/generated_cases.c.h" } STACK_SHRINK(2); DISPATCH(); @@ -215,10 +227,10 @@ PyObject *_tmp_2; { PyObject *value; - #line 198 "Python/bytecodes.c" + #line 204 "Python/bytecodes.c" value = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(value); - #line 222 "Python/generated_cases.c.h" + #line 234 "Python/generated_cases.c.h" _tmp_2 = value; } oparg = (next_instr++)->op.arg; @@ -228,7 +240,7 @@ value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); - #line 232 "Python/generated_cases.c.h" + #line 244 "Python/generated_cases.c.h" _tmp_1 = value; } STACK_GROW(2); @@ -239,8 +251,8 @@ TARGET(POP_TOP) { PyObject *value = stack_pointer[-1]; - #line 213 "Python/bytecodes.c" - #line 244 "Python/generated_cases.c.h" + #line 219 "Python/bytecodes.c" + #line 256 "Python/generated_cases.c.h" Py_DECREF(value); STACK_SHRINK(1); DISPATCH(); @@ -248,9 +260,9 @@ TARGET(PUSH_NULL) { PyObject *res; - #line 217 "Python/bytecodes.c" + #line 223 "Python/bytecodes.c" res = NULL; - #line 
254 "Python/generated_cases.c.h" + #line 266 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; DISPATCH(); @@ -261,14 +273,14 @@ PyObject *_tmp_2 = stack_pointer[-2]; { PyObject *value = _tmp_1; - #line 213 "Python/bytecodes.c" - #line 266 "Python/generated_cases.c.h" + #line 219 "Python/bytecodes.c" + #line 278 "Python/generated_cases.c.h" Py_DECREF(value); } { PyObject *value = _tmp_2; - #line 213 "Python/bytecodes.c" - #line 272 "Python/generated_cases.c.h" + #line 219 "Python/bytecodes.c" + #line 284 "Python/generated_cases.c.h" Py_DECREF(value); } STACK_SHRINK(2); @@ -278,7 +290,7 @@ TARGET(INSTRUMENTED_END_FOR) { PyObject *value = stack_pointer[-1]; PyObject *receiver = stack_pointer[-2]; - #line 223 "Python/bytecodes.c" + #line 229 "Python/bytecodes.c" /* Need to create a fake StopIteration error here, * to conform to PEP 380 */ if (PyGen_Check(receiver)) { @@ -288,7 +300,7 @@ } PyErr_SetRaisedException(NULL); } - #line 292 "Python/generated_cases.c.h" + #line 304 "Python/generated_cases.c.h" Py_DECREF(receiver); Py_DECREF(value); STACK_SHRINK(2); @@ -298,9 +310,9 @@ TARGET(END_SEND) { PyObject *value = stack_pointer[-1]; PyObject *receiver = stack_pointer[-2]; - #line 236 "Python/bytecodes.c" + #line 242 "Python/bytecodes.c" Py_DECREF(receiver); - #line 304 "Python/generated_cases.c.h" + #line 316 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = value; DISPATCH(); @@ -309,7 +321,7 @@ TARGET(INSTRUMENTED_END_SEND) { PyObject *value = stack_pointer[-1]; PyObject *receiver = stack_pointer[-2]; - #line 240 "Python/bytecodes.c" + #line 246 "Python/bytecodes.c" if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) { PyErr_SetObject(PyExc_StopIteration, value); if (monitor_stop_iteration(tstate, frame, next_instr-1)) { @@ -318,7 +330,7 @@ PyErr_SetRaisedException(NULL); } Py_DECREF(receiver); - #line 322 "Python/generated_cases.c.h" + #line 334 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = value; DISPATCH(); @@ -327,13 +339,13 @@ TARGET(UNARY_NEGATIVE) { PyObject *value = stack_pointer[-1]; PyObject *res; - #line 251 "Python/bytecodes.c" + #line 257 "Python/bytecodes.c" res = PyNumber_Negative(value); - #line 333 "Python/generated_cases.c.h" + #line 345 "Python/generated_cases.c.h" Py_DECREF(value); - #line 253 "Python/bytecodes.c" + #line 259 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; - #line 337 "Python/generated_cases.c.h" + #line 349 "Python/generated_cases.c.h" stack_pointer[-1] = res; DISPATCH(); } @@ -341,11 +353,11 @@ TARGET(UNARY_NOT) { PyObject *value = stack_pointer[-1]; PyObject *res; - #line 257 "Python/bytecodes.c" + #line 263 "Python/bytecodes.c" int err = PyObject_IsTrue(value); - #line 347 "Python/generated_cases.c.h" + #line 359 "Python/generated_cases.c.h" Py_DECREF(value); - #line 259 "Python/bytecodes.c" + #line 265 "Python/bytecodes.c" if (err < 0) goto pop_1_error; if (err == 0) { res = Py_True; @@ -354,7 +366,7 @@ res = Py_False; } Py_INCREF(res); - #line 358 "Python/generated_cases.c.h" + #line 370 "Python/generated_cases.c.h" stack_pointer[-1] = res; DISPATCH(); } @@ -362,13 +374,13 @@ TARGET(UNARY_INVERT) { PyObject *value = stack_pointer[-1]; PyObject *res; - #line 270 "Python/bytecodes.c" + #line 276 "Python/bytecodes.c" res = PyNumber_Invert(value); - #line 368 "Python/generated_cases.c.h" + #line 380 "Python/generated_cases.c.h" Py_DECREF(value); - #line 272 "Python/bytecodes.c" + #line 278 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; - #line 372 "Python/generated_cases.c.h" 
+ #line 384 "Python/generated_cases.c.h" stack_pointer[-1] = res; DISPATCH(); } @@ -377,7 +389,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *prod; - #line 289 "Python/bytecodes.c" + #line 295 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -385,7 +397,7 @@ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (prod == NULL) goto pop_2_error; - #line 389 "Python/generated_cases.c.h" + #line 401 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = prod; next_instr += 1; @@ -396,14 +408,14 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *prod; - #line 299 "Python/bytecodes.c" + #line 305 "Python/bytecodes.c" DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); double dprod = ((PyFloatObject *)left)->ob_fval * ((PyFloatObject *)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dprod, prod); - #line 407 "Python/generated_cases.c.h" + #line 419 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = prod; next_instr += 1; @@ -414,7 +426,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *sub; - #line 308 "Python/bytecodes.c" + #line 314 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -422,7 +434,7 @@ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (sub == NULL) goto pop_2_error; - #line 426 "Python/generated_cases.c.h" + #line 438 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = sub; next_instr += 1; @@ -433,13 +445,13 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *sub; - #line 318 "Python/bytecodes.c" + #line 324 "Python/bytecodes.c" DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); STAT_INC(BINARY_OP, hit); double dsub = ((PyFloatObject *)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dsub, sub); - #line 443 "Python/generated_cases.c.h" + #line 455 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = sub; next_instr += 1; @@ -450,7 +462,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 326 "Python/bytecodes.c" + #line 332 "Python/bytecodes.c" DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -458,7 +470,7 @@ _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); if (res == NULL) goto pop_2_error; - #line 462 "Python/generated_cases.c.h" + #line 474 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -468,7 +480,7 @@ TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; - #line 342 "Python/bytecodes.c" + #line 348 "Python/bytecodes.c" DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; @@ -495,7 +507,7 @@ if (*target_local == NULL) goto pop_2_error; // The STORE_FAST is already done. 
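Illustrative sketch (not part of the patch): BINARY_OP_INPLACE_ADD_UNICODE targets the "build a string in a loop" idiom, where the BINARY_OP is immediately followed by a STORE_FAST back into the same local -- hence the "STORE_FAST is already done" comment above. Whether it actually specializes depends on the adaptive counter warming up:

    def join_words(words):
        s = ""
        for w in words:
            s += w     # str += str stored back into the same local: the case this family targets
        return s

    join_words(["a", "b", "c"])   # "abc"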
JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1); - #line 499 "Python/generated_cases.c.h" + #line 511 "Python/generated_cases.c.h" STACK_SHRINK(2); DISPATCH(); } @@ -504,14 +516,14 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *sum; - #line 371 "Python/bytecodes.c" + #line 377 "Python/bytecodes.c" DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); double dsum = ((PyFloatObject *)left)->ob_fval + ((PyFloatObject *)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dsum, sum); - #line 515 "Python/generated_cases.c.h" + #line 527 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = sum; next_instr += 1; @@ -522,7 +534,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *sum; - #line 380 "Python/bytecodes.c" + #line 386 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); STAT_INC(BINARY_OP, hit); @@ -530,7 +542,7 @@ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (sum == NULL) goto pop_2_error; - #line 534 "Python/generated_cases.c.h" + #line 546 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = sum; next_instr += 1; @@ -543,7 +555,7 @@ PyObject *sub = stack_pointer[-1]; PyObject *container = stack_pointer[-2]; PyObject *res; - #line 398 "Python/bytecodes.c" + #line 404 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -555,12 +567,12 @@ DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ res = PyObject_GetItem(container, sub); - #line 559 "Python/generated_cases.c.h" + #line 571 "Python/generated_cases.c.h" Py_DECREF(container); Py_DECREF(sub); - #line 410 "Python/bytecodes.c" + #line 416 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 564 "Python/generated_cases.c.h" + #line 576 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -572,7 +584,7 @@ PyObject *start = stack_pointer[-2]; PyObject *container = stack_pointer[-3]; PyObject *res; - #line 414 "Python/bytecodes.c" + #line 420 "Python/bytecodes.c" PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); // Can't use ERROR_IF() here, because we haven't // DECREF'ed container yet, and we still own slice. 
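Illustrative sketch (not part of the patch) of the subscript and slice forms serviced by the handlers in this region; whether the adaptive BINARY_SUBSCR actually specializes depends on warm-up and the exact operand types:

    def lookups(lst, tpl, dct):
        a = lst[1]         # candidate for BINARY_SUBSCR_LIST_INT
        b = tpl[0]         # candidate for BINARY_SUBSCR_TUPLE_INT
        c = dct["key"]     # candidate for BINARY_SUBSCR_DICT
        d = lst[0:2]       # compiled directly to BINARY_SLICE
        lst[0:1] = [99]    # compiled directly to STORE_SLICE
        return a, b, c, d

    lookups([10, 20, 30], (1, 2), {"key": 3})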
@@ -585,7 +597,7 @@ } Py_DECREF(container); if (res == NULL) goto pop_3_error; - #line 589 "Python/generated_cases.c.h" + #line 601 "Python/generated_cases.c.h" STACK_SHRINK(2); stack_pointer[-1] = res; DISPATCH(); @@ -596,7 +608,7 @@ PyObject *start = stack_pointer[-2]; PyObject *container = stack_pointer[-3]; PyObject *v = stack_pointer[-4]; - #line 429 "Python/bytecodes.c" + #line 435 "Python/bytecodes.c" PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); int err; if (slice == NULL) { @@ -609,7 +621,7 @@ Py_DECREF(v); Py_DECREF(container); if (err) goto pop_4_error; - #line 613 "Python/generated_cases.c.h" + #line 625 "Python/generated_cases.c.h" STACK_SHRINK(4); DISPATCH(); } @@ -618,7 +630,7 @@ PyObject *sub = stack_pointer[-1]; PyObject *list = stack_pointer[-2]; PyObject *res; - #line 444 "Python/bytecodes.c" + #line 450 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); @@ -632,7 +644,7 @@ Py_INCREF(res); _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(list); - #line 636 "Python/generated_cases.c.h" + #line 648 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -643,7 +655,7 @@ PyObject *sub = stack_pointer[-1]; PyObject *tuple = stack_pointer[-2]; PyObject *res; - #line 460 "Python/bytecodes.c" + #line 466 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); @@ -657,7 +669,7 @@ Py_INCREF(res); _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(tuple); - #line 661 "Python/generated_cases.c.h" + #line 673 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -668,7 +680,7 @@ PyObject *sub = stack_pointer[-1]; PyObject *dict = stack_pointer[-2]; PyObject *res; - #line 476 "Python/bytecodes.c" + #line 482 "Python/bytecodes.c" DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); res = PyDict_GetItemWithError(dict, sub); @@ -676,14 +688,14 @@ if (!_PyErr_Occurred(tstate)) { _PyErr_SetKeyError(sub); } - #line 680 "Python/generated_cases.c.h" + #line 692 "Python/generated_cases.c.h" Py_DECREF(dict); Py_DECREF(sub); - #line 484 "Python/bytecodes.c" + #line 490 "Python/bytecodes.c" if (true) goto pop_2_error; } Py_INCREF(res); // Do this before DECREF'ing dict, sub - #line 687 "Python/generated_cases.c.h" + #line 699 "Python/generated_cases.c.h" Py_DECREF(dict); Py_DECREF(sub); STACK_SHRINK(1); @@ -695,7 +707,7 @@ TARGET(BINARY_SUBSCR_GETITEM) { PyObject *sub = stack_pointer[-1]; PyObject *container = stack_pointer[-2]; - #line 491 "Python/bytecodes.c" + #line 497 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(container); DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR); PyHeapTypeObject *ht = (PyHeapTypeObject *)tp; @@ -717,15 +729,15 @@ JUMPBY(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 721 "Python/generated_cases.c.h" + #line 733 "Python/generated_cases.c.h" } TARGET(LIST_APPEND) { PyObject *v = stack_pointer[-1]; PyObject *list = stack_pointer[-(2 + (oparg-1))]; - #line 515 "Python/bytecodes.c" + #line 521 "Python/bytecodes.c" if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error; - #line 729 "Python/generated_cases.c.h" + #line 741 "Python/generated_cases.c.h" STACK_SHRINK(1); PREDICT(JUMP_BACKWARD); DISPATCH(); @@ -734,13 +746,13 @@ TARGET(SET_ADD) { PyObject *v = stack_pointer[-1]; PyObject *set = 
stack_pointer[-(2 + (oparg-1))]; - #line 520 "Python/bytecodes.c" + #line 526 "Python/bytecodes.c" int err = PySet_Add(set, v); - #line 740 "Python/generated_cases.c.h" + #line 752 "Python/generated_cases.c.h" Py_DECREF(v); - #line 522 "Python/bytecodes.c" + #line 528 "Python/bytecodes.c" if (err) goto pop_1_error; - #line 744 "Python/generated_cases.c.h" + #line 756 "Python/generated_cases.c.h" STACK_SHRINK(1); PREDICT(JUMP_BACKWARD); DISPATCH(); @@ -753,7 +765,7 @@ PyObject *container = stack_pointer[-2]; PyObject *v = stack_pointer[-3]; uint16_t counter = read_u16(&next_instr[0].cache); - #line 533 "Python/bytecodes.c" + #line 539 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { next_instr--; @@ -768,13 +780,13 @@ #endif /* ENABLE_SPECIALIZATION */ /* container[sub] = v */ int err = PyObject_SetItem(container, sub, v); - #line 772 "Python/generated_cases.c.h" + #line 784 "Python/generated_cases.c.h" Py_DECREF(v); Py_DECREF(container); Py_DECREF(sub); - #line 548 "Python/bytecodes.c" + #line 554 "Python/bytecodes.c" if (err) goto pop_3_error; - #line 778 "Python/generated_cases.c.h" + #line 790 "Python/generated_cases.c.h" STACK_SHRINK(3); next_instr += 1; DISPATCH(); @@ -784,7 +796,7 @@ PyObject *sub = stack_pointer[-1]; PyObject *list = stack_pointer[-2]; PyObject *value = stack_pointer[-3]; - #line 552 "Python/bytecodes.c" + #line 558 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); @@ -801,7 +813,7 @@ Py_DECREF(old_value); _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(list); - #line 805 "Python/generated_cases.c.h" + #line 817 "Python/generated_cases.c.h" STACK_SHRINK(3); next_instr += 1; DISPATCH(); @@ -811,13 +823,13 @@ PyObject *sub = stack_pointer[-1]; PyObject *dict = stack_pointer[-2]; PyObject *value = stack_pointer[-3]; - #line 571 "Python/bytecodes.c" + #line 577 "Python/bytecodes.c" DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); STAT_INC(STORE_SUBSCR, hit); int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); Py_DECREF(dict); if (err) goto pop_3_error; - #line 821 "Python/generated_cases.c.h" + #line 833 "Python/generated_cases.c.h" STACK_SHRINK(3); next_instr += 1; DISPATCH(); @@ -826,15 +838,15 @@ TARGET(DELETE_SUBSCR) { PyObject *sub = stack_pointer[-1]; PyObject *container = stack_pointer[-2]; - #line 579 "Python/bytecodes.c" + #line 585 "Python/bytecodes.c" /* del container[sub] */ int err = PyObject_DelItem(container, sub); - #line 833 "Python/generated_cases.c.h" + #line 845 "Python/generated_cases.c.h" Py_DECREF(container); Py_DECREF(sub); - #line 582 "Python/bytecodes.c" + #line 588 "Python/bytecodes.c" if (err) goto pop_2_error; - #line 838 "Python/generated_cases.c.h" + #line 850 "Python/generated_cases.c.h" STACK_SHRINK(2); DISPATCH(); } @@ -842,14 +854,14 @@ TARGET(CALL_INTRINSIC_1) { PyObject *value = stack_pointer[-1]; PyObject *res; - #line 586 "Python/bytecodes.c" + #line 592 "Python/bytecodes.c" assert(oparg <= MAX_INTRINSIC_1); res = _PyIntrinsics_UnaryFunctions[oparg](tstate, value); - #line 849 "Python/generated_cases.c.h" + #line 861 "Python/generated_cases.c.h" Py_DECREF(value); - #line 589 "Python/bytecodes.c" + #line 595 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; - #line 853 "Python/generated_cases.c.h" + #line 865 "Python/generated_cases.c.h" stack_pointer[-1] = res; DISPATCH(); } @@ -858,15 +870,15 @@ PyObject *value1 = stack_pointer[-1]; PyObject *value2 = stack_pointer[-2]; PyObject *res; 
- #line 593 "Python/bytecodes.c" + #line 599 "Python/bytecodes.c" assert(oparg <= MAX_INTRINSIC_2); res = _PyIntrinsics_BinaryFunctions[oparg](tstate, value2, value1); - #line 865 "Python/generated_cases.c.h" + #line 877 "Python/generated_cases.c.h" Py_DECREF(value2); Py_DECREF(value1); - #line 596 "Python/bytecodes.c" + #line 602 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 870 "Python/generated_cases.c.h" + #line 882 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; DISPATCH(); @@ -874,7 +886,7 @@ TARGET(RAISE_VARARGS) { PyObject **args = (stack_pointer - oparg); - #line 600 "Python/bytecodes.c" + #line 606 "Python/bytecodes.c" PyObject *cause = NULL, *exc = NULL; switch (oparg) { case 2: @@ -892,12 +904,12 @@ break; } if (true) { STACK_SHRINK(oparg); goto error; } - #line 896 "Python/generated_cases.c.h" + #line 908 "Python/generated_cases.c.h" } TARGET(INTERPRETER_EXIT) { PyObject *retval = stack_pointer[-1]; - #line 620 "Python/bytecodes.c" + #line 626 "Python/bytecodes.c" assert(frame == &entry_frame); assert(_PyFrame_IsIncomplete(frame)); STACK_SHRINK(1); // Since we're not going to DISPATCH() @@ -908,12 +920,12 @@ assert(!_PyErr_Occurred(tstate)); _Py_LeaveRecursiveCallTstate(tstate); return retval; - #line 912 "Python/generated_cases.c.h" + #line 924 "Python/generated_cases.c.h" } TARGET(RETURN_VALUE) { PyObject *retval = stack_pointer[-1]; - #line 633 "Python/bytecodes.c" + #line 639 "Python/bytecodes.c" STACK_SHRINK(1); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -926,12 +938,12 @@ frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 930 "Python/generated_cases.c.h" + #line 942 "Python/generated_cases.c.h" } TARGET(INSTRUMENTED_RETURN_VALUE) { PyObject *retval = stack_pointer[-1]; - #line 648 "Python/bytecodes.c" + #line 654 "Python/bytecodes.c" int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_RETURN, frame, next_instr-1, retval); @@ -948,11 +960,11 @@ frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 952 "Python/generated_cases.c.h" + #line 964 "Python/generated_cases.c.h" } TARGET(RETURN_CONST) { - #line 667 "Python/bytecodes.c" + #line 673 "Python/bytecodes.c" PyObject *retval = GETITEM(frame->f_code->co_consts, oparg); Py_INCREF(retval); assert(EMPTY()); @@ -966,11 +978,11 @@ frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 970 "Python/generated_cases.c.h" + #line 982 "Python/generated_cases.c.h" } TARGET(INSTRUMENTED_RETURN_CONST) { - #line 683 "Python/bytecodes.c" + #line 689 "Python/bytecodes.c" PyObject *retval = GETITEM(frame->f_code->co_consts, oparg); int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_RETURN, @@ -988,13 +1000,13 @@ frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 992 "Python/generated_cases.c.h" + #line 1004 "Python/generated_cases.c.h" } TARGET(GET_AITER) { PyObject *obj = stack_pointer[-1]; PyObject *iter; - #line 703 "Python/bytecodes.c" + #line 709 "Python/bytecodes.c" unaryfunc getter = NULL; PyTypeObject *type = Py_TYPE(obj); @@ -1007,16 +1019,16 @@ "'async for' requires an object with " "__aiter__ method, got %.100s", type->tp_name); - #line 1011 "Python/generated_cases.c.h" + #line 1023 "Python/generated_cases.c.h" Py_DECREF(obj); - #line 716 "Python/bytecodes.c" + #line 722 "Python/bytecodes.c" if (true) goto pop_1_error; } 
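Illustrative sketch (not part of the patch): GET_AITER and GET_ANEXT implement the async-for protocol being error-checked here -- __aiter__ must return an object whose type provides __anext__, and the awaitable from __anext__ either produces a value or raises StopAsyncIteration:

    import asyncio

    class Countdown:
        def __init__(self, n):
            self.n = n
        def __aiter__(self):            # looked up by GET_AITER
            return self
        async def __anext__(self):      # resolved and awaited via GET_ANEXT
            if self.n == 0:
                raise StopAsyncIteration
            self.n -= 1
            return self.n

    async def consume():
        total = 0
        async for i in Countdown(3):
            total += i
        return total                    # 2 + 1 + 0 == 3

    print(asyncio.run(consume()))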
iter = (*getter)(obj); - #line 1018 "Python/generated_cases.c.h" + #line 1030 "Python/generated_cases.c.h" Py_DECREF(obj); - #line 721 "Python/bytecodes.c" + #line 727 "Python/bytecodes.c" if (iter == NULL) goto pop_1_error; if (Py_TYPE(iter)->tp_as_async == NULL || @@ -1029,7 +1041,7 @@ Py_DECREF(iter); if (true) goto pop_1_error; } - #line 1033 "Python/generated_cases.c.h" + #line 1045 "Python/generated_cases.c.h" stack_pointer[-1] = iter; DISPATCH(); } @@ -1037,7 +1049,7 @@ TARGET(GET_ANEXT) { PyObject *aiter = stack_pointer[-1]; PyObject *awaitable; - #line 736 "Python/bytecodes.c" + #line 742 "Python/bytecodes.c" unaryfunc getter = NULL; PyObject *next_iter = NULL; PyTypeObject *type = Py_TYPE(aiter); @@ -1081,7 +1093,7 @@ } } - #line 1085 "Python/generated_cases.c.h" + #line 1097 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = awaitable; PREDICT(LOAD_CONST); @@ -1092,16 +1104,16 @@ PREDICTED(GET_AWAITABLE); PyObject *iterable = stack_pointer[-1]; PyObject *iter; - #line 783 "Python/bytecodes.c" + #line 789 "Python/bytecodes.c" iter = _PyCoro_GetAwaitableIter(iterable); if (iter == NULL) { format_awaitable_error(tstate, Py_TYPE(iterable), oparg); } - #line 1103 "Python/generated_cases.c.h" + #line 1115 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 790 "Python/bytecodes.c" + #line 796 "Python/bytecodes.c" if (iter != NULL && PyCoro_CheckExact(iter)) { PyObject *yf = _PyGen_yf((PyGenObject*)iter); @@ -1119,7 +1131,7 @@ if (iter == NULL) goto pop_1_error; - #line 1123 "Python/generated_cases.c.h" + #line 1135 "Python/generated_cases.c.h" stack_pointer[-1] = iter; PREDICT(LOAD_CONST); DISPATCH(); @@ -1130,7 +1142,7 @@ PyObject *v = stack_pointer[-1]; PyObject *receiver = stack_pointer[-2]; PyObject *retval; - #line 816 "Python/bytecodes.c" + #line 822 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PySendCache *cache = (_PySendCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1176,7 +1188,7 @@ } } Py_DECREF(v); - #line 1180 "Python/generated_cases.c.h" + #line 1192 "Python/generated_cases.c.h" stack_pointer[-1] = retval; next_instr += 1; DISPATCH(); @@ -1185,7 +1197,7 @@ TARGET(SEND_GEN) { PyObject *v = stack_pointer[-1]; PyObject *receiver = stack_pointer[-2]; - #line 864 "Python/bytecodes.c" + #line 870 "Python/bytecodes.c" PyGenObject *gen = (PyGenObject *)receiver; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type && Py_TYPE(gen) != &PyCoro_Type, SEND); @@ -1200,12 +1212,12 @@ tstate->exc_info = &gen->gi_exc_state; JUMPBY(INLINE_CACHE_ENTRIES_SEND); DISPATCH_INLINED(gen_frame); - #line 1204 "Python/generated_cases.c.h" + #line 1216 "Python/generated_cases.c.h" } TARGET(INSTRUMENTED_YIELD_VALUE) { PyObject *retval = stack_pointer[-1]; - #line 881 "Python/bytecodes.c" + #line 887 "Python/bytecodes.c" assert(frame != &entry_frame); PyGenObject *gen = _PyFrame_GetGenerator(frame); gen->gi_frame_state = FRAME_SUSPENDED; @@ -1222,12 +1234,12 @@ gen_frame->previous = NULL; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 1226 "Python/generated_cases.c.h" + #line 1238 "Python/generated_cases.c.h" } TARGET(YIELD_VALUE) { PyObject *retval = stack_pointer[-1]; - #line 900 "Python/bytecodes.c" + #line 906 "Python/bytecodes.c" // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. 
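Illustrative sketch (not part of the patch) of the close()/throw() behavior the comment above is guarding: any exception raised while a generator is suspended at a YIELD_VALUE is attributed to the close() or throw() call that delivered it, which from Python looks like this:

    def gen():
        try:
            yield 1                 # suspended at a YIELD_VALUE
        except ValueError:
            yield "handled"         # throw() delivered the exception here
        except GeneratorExit:
            raise                   # close() delivered GeneratorExit; re-raising is a clean exit

    g = gen()
    next(g)                         # advance to the first yield
    print(g.throw(ValueError()))    # -> "handled"

    g2 = gen()
    next(g2)
    g2.close()                      # generator finishes without error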
@@ -1243,15 +1255,15 @@ gen_frame->previous = NULL; _PyFrame_StackPush(frame, retval); goto resume_frame; - #line 1247 "Python/generated_cases.c.h" + #line 1259 "Python/generated_cases.c.h" } TARGET(POP_EXCEPT) { PyObject *exc_value = stack_pointer[-1]; - #line 918 "Python/bytecodes.c" + #line 924 "Python/bytecodes.c" _PyErr_StackItem *exc_info = tstate->exc_info; Py_XSETREF(exc_info->exc_value, exc_value); - #line 1255 "Python/generated_cases.c.h" + #line 1267 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } @@ -1259,7 +1271,7 @@ TARGET(RERAISE) { PyObject *exc = stack_pointer[-1]; PyObject **values = (stack_pointer - (1 + oparg)); - #line 923 "Python/bytecodes.c" + #line 929 "Python/bytecodes.c" assert(oparg >= 0 && oparg <= 2); if (oparg) { PyObject *lasti = values[0]; @@ -1277,26 +1289,26 @@ Py_INCREF(exc); _PyErr_SetRaisedException(tstate, exc); goto exception_unwind; - #line 1281 "Python/generated_cases.c.h" + #line 1293 "Python/generated_cases.c.h" } TARGET(END_ASYNC_FOR) { PyObject *exc = stack_pointer[-1]; PyObject *awaitable = stack_pointer[-2]; - #line 943 "Python/bytecodes.c" + #line 949 "Python/bytecodes.c" assert(exc && PyExceptionInstance_Check(exc)); if (PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) { - #line 1290 "Python/generated_cases.c.h" + #line 1302 "Python/generated_cases.c.h" Py_DECREF(awaitable); Py_DECREF(exc); - #line 946 "Python/bytecodes.c" + #line 952 "Python/bytecodes.c" } else { Py_INCREF(exc); _PyErr_SetRaisedException(tstate, exc); goto exception_unwind; } - #line 1300 "Python/generated_cases.c.h" + #line 1312 "Python/generated_cases.c.h" STACK_SHRINK(2); DISPATCH(); } @@ -1307,23 +1319,23 @@ PyObject *sub_iter = stack_pointer[-3]; PyObject *none; PyObject *value; - #line 955 "Python/bytecodes.c" + #line 961 "Python/bytecodes.c" assert(throwflag); assert(exc_value && PyExceptionInstance_Check(exc_value)); if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { value = Py_NewRef(((PyStopIterationObject *)exc_value)->value); - #line 1316 "Python/generated_cases.c.h" + #line 1328 "Python/generated_cases.c.h" Py_DECREF(sub_iter); Py_DECREF(last_sent_val); Py_DECREF(exc_value); - #line 960 "Python/bytecodes.c" + #line 966 "Python/bytecodes.c" none = Py_NewRef(Py_None); } else { _PyErr_SetRaisedException(tstate, Py_NewRef(exc_value)); goto exception_unwind; } - #line 1327 "Python/generated_cases.c.h" + #line 1339 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = value; stack_pointer[-2] = none; @@ -1332,9 +1344,9 @@ TARGET(LOAD_ASSERTION_ERROR) { PyObject *value; - #line 969 "Python/bytecodes.c" + #line 975 "Python/bytecodes.c" value = Py_NewRef(PyExc_AssertionError); - #line 1338 "Python/generated_cases.c.h" + #line 1350 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -1342,7 +1354,7 @@ TARGET(LOAD_BUILD_CLASS) { PyObject *bc; - #line 973 "Python/bytecodes.c" + #line 979 "Python/bytecodes.c" if (PyDict_CheckExact(BUILTINS())) { bc = _PyDict_GetItemWithError(BUILTINS(), &_Py_ID(__build_class__)); @@ -1364,7 +1376,7 @@ if (true) goto error; } } - #line 1368 "Python/generated_cases.c.h" + #line 1380 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = bc; DISPATCH(); @@ -1372,33 +1384,33 @@ TARGET(STORE_NAME) { PyObject *v = stack_pointer[-1]; - #line 997 "Python/bytecodes.c" + #line 1003 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); PyObject *ns = LOCALS(); int err; if (ns == NULL) { _PyErr_Format(tstate, PyExc_SystemError, "no 
locals found when storing %R", name); - #line 1383 "Python/generated_cases.c.h" + #line 1395 "Python/generated_cases.c.h" Py_DECREF(v); - #line 1004 "Python/bytecodes.c" + #line 1010 "Python/bytecodes.c" if (true) goto pop_1_error; } if (PyDict_CheckExact(ns)) err = PyDict_SetItem(ns, name, v); else err = PyObject_SetItem(ns, name, v); - #line 1392 "Python/generated_cases.c.h" + #line 1404 "Python/generated_cases.c.h" Py_DECREF(v); - #line 1011 "Python/bytecodes.c" + #line 1017 "Python/bytecodes.c" if (err) goto pop_1_error; - #line 1396 "Python/generated_cases.c.h" + #line 1408 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(DELETE_NAME) { - #line 1015 "Python/bytecodes.c" + #line 1021 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); PyObject *ns = LOCALS(); int err; @@ -1415,7 +1427,7 @@ name); goto error; } - #line 1419 "Python/generated_cases.c.h" + #line 1431 "Python/generated_cases.c.h" DISPATCH(); } @@ -1423,7 +1435,7 @@ PREDICTED(UNPACK_SEQUENCE); static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size"); PyObject *seq = stack_pointer[-1]; - #line 1041 "Python/bytecodes.c" + #line 1047 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1436,11 +1448,11 @@ #endif /* ENABLE_SPECIALIZATION */ PyObject **top = stack_pointer + oparg - 1; int res = unpack_iterable(tstate, seq, oparg, -1, top); - #line 1440 "Python/generated_cases.c.h" + #line 1452 "Python/generated_cases.c.h" Py_DECREF(seq); - #line 1054 "Python/bytecodes.c" + #line 1060 "Python/bytecodes.c" if (res == 0) goto pop_1_error; - #line 1444 "Python/generated_cases.c.h" + #line 1456 "Python/generated_cases.c.h" STACK_SHRINK(1); STACK_GROW(oparg); next_instr += 1; @@ -1450,14 +1462,14 @@ TARGET(UNPACK_SEQUENCE_TWO_TUPLE) { PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); - #line 1058 "Python/bytecodes.c" + #line 1064 "Python/bytecodes.c" DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); assert(oparg == 2); STAT_INC(UNPACK_SEQUENCE, hit); values[0] = Py_NewRef(PyTuple_GET_ITEM(seq, 1)); values[1] = Py_NewRef(PyTuple_GET_ITEM(seq, 0)); - #line 1461 "Python/generated_cases.c.h" + #line 1473 "Python/generated_cases.c.h" Py_DECREF(seq); STACK_SHRINK(1); STACK_GROW(oparg); @@ -1468,7 +1480,7 @@ TARGET(UNPACK_SEQUENCE_TUPLE) { PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); - #line 1068 "Python/bytecodes.c" + #line 1074 "Python/bytecodes.c" DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); STAT_INC(UNPACK_SEQUENCE, hit); @@ -1476,7 +1488,7 @@ for (int i = oparg; --i >= 0; ) { *values++ = Py_NewRef(items[i]); } - #line 1480 "Python/generated_cases.c.h" + #line 1492 "Python/generated_cases.c.h" Py_DECREF(seq); STACK_SHRINK(1); STACK_GROW(oparg); @@ -1487,7 +1499,7 @@ TARGET(UNPACK_SEQUENCE_LIST) { PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); - #line 1079 "Python/bytecodes.c" + #line 1085 "Python/bytecodes.c" DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); STAT_INC(UNPACK_SEQUENCE, hit); @@ -1495,7 +1507,7 @@ for (int i = oparg; --i >= 0; ) { *values++ = Py_NewRef(items[i]); } - #line 1499 "Python/generated_cases.c.h" + #line 1511 "Python/generated_cases.c.h" Py_DECREF(seq); 
STACK_SHRINK(1); STACK_GROW(oparg); @@ -1505,15 +1517,15 @@ TARGET(UNPACK_EX) { PyObject *seq = stack_pointer[-1]; - #line 1090 "Python/bytecodes.c" + #line 1096 "Python/bytecodes.c" int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); PyObject **top = stack_pointer + totalargs - 1; int res = unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); - #line 1513 "Python/generated_cases.c.h" + #line 1525 "Python/generated_cases.c.h" Py_DECREF(seq); - #line 1094 "Python/bytecodes.c" + #line 1100 "Python/bytecodes.c" if (res == 0) goto pop_1_error; - #line 1517 "Python/generated_cases.c.h" + #line 1529 "Python/generated_cases.c.h" STACK_GROW((oparg & 0xFF) + (oparg >> 8)); DISPATCH(); } @@ -1524,7 +1536,7 @@ PyObject *owner = stack_pointer[-1]; PyObject *v = stack_pointer[-2]; uint16_t counter = read_u16(&next_instr[0].cache); - #line 1105 "Python/bytecodes.c" + #line 1111 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { PyObject *name = GETITEM(frame->f_code->co_names, oparg); @@ -1540,12 +1552,12 @@ #endif /* ENABLE_SPECIALIZATION */ PyObject *name = GETITEM(frame->f_code->co_names, oparg); int err = PyObject_SetAttr(owner, name, v); - #line 1544 "Python/generated_cases.c.h" + #line 1556 "Python/generated_cases.c.h" Py_DECREF(v); Py_DECREF(owner); - #line 1121 "Python/bytecodes.c" + #line 1127 "Python/bytecodes.c" if (err) goto pop_2_error; - #line 1549 "Python/generated_cases.c.h" + #line 1561 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -1553,34 +1565,34 @@ TARGET(DELETE_ATTR) { PyObject *owner = stack_pointer[-1]; - #line 1125 "Python/bytecodes.c" + #line 1131 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); int err = PyObject_SetAttr(owner, name, (PyObject *)NULL); - #line 1560 "Python/generated_cases.c.h" + #line 1572 "Python/generated_cases.c.h" Py_DECREF(owner); - #line 1128 "Python/bytecodes.c" + #line 1134 "Python/bytecodes.c" if (err) goto pop_1_error; - #line 1564 "Python/generated_cases.c.h" + #line 1576 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(STORE_GLOBAL) { PyObject *v = stack_pointer[-1]; - #line 1132 "Python/bytecodes.c" + #line 1138 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); int err = PyDict_SetItem(GLOBALS(), name, v); - #line 1574 "Python/generated_cases.c.h" + #line 1586 "Python/generated_cases.c.h" Py_DECREF(v); - #line 1135 "Python/bytecodes.c" + #line 1141 "Python/bytecodes.c" if (err) goto pop_1_error; - #line 1578 "Python/generated_cases.c.h" + #line 1590 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(DELETE_GLOBAL) { - #line 1139 "Python/bytecodes.c" + #line 1145 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); int err; err = PyDict_DelItem(GLOBALS(), name); @@ -1592,13 +1604,13 @@ } goto error; } - #line 1596 "Python/generated_cases.c.h" + #line 1608 "Python/generated_cases.c.h" DISPATCH(); } TARGET(LOAD_NAME) { PyObject *v; - #line 1153 "Python/bytecodes.c" + #line 1159 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); PyObject *locals = LOCALS(); if (locals == NULL) { @@ -1657,7 +1669,7 @@ } } } - #line 1661 "Python/generated_cases.c.h" + #line 1673 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = v; DISPATCH(); @@ -1668,7 +1680,7 @@ static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size"); PyObject *null = NULL; PyObject *v; - #line 1220 "Python/bytecodes.c" + #line 1226 
"Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1720,7 +1732,7 @@ } } null = NULL; - #line 1724 "Python/generated_cases.c.h" + #line 1736 "Python/generated_cases.c.h" STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = v; @@ -1734,7 +1746,7 @@ PyObject *res; uint16_t index = read_u16(&next_instr[1].cache); uint16_t version = read_u16(&next_instr[2].cache); - #line 1274 "Python/bytecodes.c" + #line 1280 "Python/bytecodes.c" DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); @@ -1745,7 +1757,7 @@ Py_INCREF(res); STAT_INC(LOAD_GLOBAL, hit); null = NULL; - #line 1749 "Python/generated_cases.c.h" + #line 1761 "Python/generated_cases.c.h" STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -1760,7 +1772,7 @@ uint16_t index = read_u16(&next_instr[1].cache); uint16_t mod_version = read_u16(&next_instr[2].cache); uint16_t bltn_version = read_u16(&next_instr[3].cache); - #line 1287 "Python/bytecodes.c" + #line 1293 "Python/bytecodes.c" DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL); DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL); PyDictObject *mdict = (PyDictObject *)GLOBALS(); @@ -1775,7 +1787,7 @@ Py_INCREF(res); STAT_INC(LOAD_GLOBAL, hit); null = NULL; - #line 1779 "Python/generated_cases.c.h" + #line 1791 "Python/generated_cases.c.h" STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -1785,16 +1797,16 @@ } TARGET(DELETE_FAST) { - #line 1304 "Python/bytecodes.c" + #line 1310 "Python/bytecodes.c" PyObject *v = GETLOCAL(oparg); if (v == NULL) goto unbound_local_error; SETLOCAL(oparg, NULL); - #line 1793 "Python/generated_cases.c.h" + #line 1805 "Python/generated_cases.c.h" DISPATCH(); } TARGET(MAKE_CELL) { - #line 1310 "Python/bytecodes.c" + #line 1316 "Python/bytecodes.c" // "initial" is probably NULL but not if it's an arg (or set // via PyFrame_LocalsToFast() before MAKE_CELL has run). PyObject *initial = GETLOCAL(oparg); @@ -1803,12 +1815,12 @@ goto resume_with_error; } SETLOCAL(oparg, cell); - #line 1807 "Python/generated_cases.c.h" + #line 1819 "Python/generated_cases.c.h" DISPATCH(); } TARGET(DELETE_DEREF) { - #line 1321 "Python/bytecodes.c" + #line 1327 "Python/bytecodes.c" PyObject *cell = GETLOCAL(oparg); PyObject *oldobj = PyCell_GET(cell); // Can't use ERROR_IF here. 
@@ -1819,13 +1831,13 @@ } PyCell_SET(cell, NULL); Py_DECREF(oldobj); - #line 1823 "Python/generated_cases.c.h" + #line 1835 "Python/generated_cases.c.h" DISPATCH(); } TARGET(LOAD_CLASSDEREF) { PyObject *value; - #line 1334 "Python/bytecodes.c" + #line 1340 "Python/bytecodes.c" PyObject *name, *locals = LOCALS(); assert(locals); assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus); @@ -1857,7 +1869,7 @@ } Py_INCREF(value); } - #line 1861 "Python/generated_cases.c.h" + #line 1873 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -1865,7 +1877,7 @@ TARGET(LOAD_DEREF) { PyObject *value; - #line 1368 "Python/bytecodes.c" + #line 1374 "Python/bytecodes.c" PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { @@ -1873,7 +1885,7 @@ if (true) goto error; } Py_INCREF(value); - #line 1877 "Python/generated_cases.c.h" + #line 1889 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = value; DISPATCH(); @@ -1881,18 +1893,18 @@ TARGET(STORE_DEREF) { PyObject *v = stack_pointer[-1]; - #line 1378 "Python/bytecodes.c" + #line 1384 "Python/bytecodes.c" PyObject *cell = GETLOCAL(oparg); PyObject *oldobj = PyCell_GET(cell); PyCell_SET(cell, v); Py_XDECREF(oldobj); - #line 1890 "Python/generated_cases.c.h" + #line 1902 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(COPY_FREE_VARS) { - #line 1385 "Python/bytecodes.c" + #line 1391 "Python/bytecodes.c" /* Copy closure variables to free variables */ PyCodeObject *co = frame->f_code; assert(PyFunction_Check(frame->f_funcobj)); @@ -1903,22 +1915,22 @@ PyObject *o = PyTuple_GET_ITEM(closure, i); frame->localsplus[offset + i] = Py_NewRef(o); } - #line 1907 "Python/generated_cases.c.h" + #line 1919 "Python/generated_cases.c.h" DISPATCH(); } TARGET(BUILD_STRING) { PyObject **pieces = (stack_pointer - oparg); PyObject *str; - #line 1398 "Python/bytecodes.c" + #line 1404 "Python/bytecodes.c" str = _PyUnicode_JoinArray(&_Py_STR(empty), pieces, oparg); - #line 1916 "Python/generated_cases.c.h" + #line 1928 "Python/generated_cases.c.h" for (int _i = oparg; --_i >= 0;) { Py_DECREF(pieces[_i]); } - #line 1400 "Python/bytecodes.c" + #line 1406 "Python/bytecodes.c" if (str == NULL) { STACK_SHRINK(oparg); goto error; } - #line 1922 "Python/generated_cases.c.h" + #line 1934 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_GROW(1); stack_pointer[-1] = str; @@ -1928,10 +1940,10 @@ TARGET(BUILD_TUPLE) { PyObject **values = (stack_pointer - oparg); PyObject *tup; - #line 1404 "Python/bytecodes.c" + #line 1410 "Python/bytecodes.c" tup = _PyTuple_FromArraySteal(values, oparg); if (tup == NULL) { STACK_SHRINK(oparg); goto error; } - #line 1935 "Python/generated_cases.c.h" + #line 1947 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_GROW(1); stack_pointer[-1] = tup; @@ -1941,10 +1953,10 @@ TARGET(BUILD_LIST) { PyObject **values = (stack_pointer - oparg); PyObject *list; - #line 1409 "Python/bytecodes.c" + #line 1415 "Python/bytecodes.c" list = _PyList_FromArraySteal(values, oparg); if (list == NULL) { STACK_SHRINK(oparg); goto error; } - #line 1948 "Python/generated_cases.c.h" + #line 1960 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_GROW(1); stack_pointer[-1] = list; @@ -1954,7 +1966,7 @@ TARGET(LIST_EXTEND) { PyObject *iterable = stack_pointer[-1]; PyObject *list = stack_pointer[-(2 + (oparg-1))]; - #line 1414 "Python/bytecodes.c" + #line 1420 "Python/bytecodes.c" PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); if (none_val == NULL) { if 
(_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && @@ -1965,13 +1977,13 @@ "Value after * must be an iterable, not %.200s", Py_TYPE(iterable)->tp_name); } - #line 1969 "Python/generated_cases.c.h" + #line 1981 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 1425 "Python/bytecodes.c" + #line 1431 "Python/bytecodes.c" if (true) goto pop_1_error; } Py_DECREF(none_val); - #line 1975 "Python/generated_cases.c.h" + #line 1987 "Python/generated_cases.c.h" Py_DECREF(iterable); STACK_SHRINK(1); DISPATCH(); @@ -1980,13 +1992,13 @@ TARGET(SET_UPDATE) { PyObject *iterable = stack_pointer[-1]; PyObject *set = stack_pointer[-(2 + (oparg-1))]; - #line 1432 "Python/bytecodes.c" + #line 1438 "Python/bytecodes.c" int err = _PySet_Update(set, iterable); - #line 1986 "Python/generated_cases.c.h" + #line 1998 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 1434 "Python/bytecodes.c" + #line 1440 "Python/bytecodes.c" if (err < 0) goto pop_1_error; - #line 1990 "Python/generated_cases.c.h" + #line 2002 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } @@ -1994,7 +2006,7 @@ TARGET(BUILD_SET) { PyObject **values = (stack_pointer - oparg); PyObject *set; - #line 1438 "Python/bytecodes.c" + #line 1444 "Python/bytecodes.c" set = PySet_New(NULL); if (set == NULL) goto error; @@ -2009,7 +2021,7 @@ Py_DECREF(set); if (true) { STACK_SHRINK(oparg); goto error; } } - #line 2013 "Python/generated_cases.c.h" + #line 2025 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_GROW(1); stack_pointer[-1] = set; @@ -2019,7 +2031,7 @@ TARGET(BUILD_MAP) { PyObject **values = (stack_pointer - oparg*2); PyObject *map; - #line 1455 "Python/bytecodes.c" + #line 1461 "Python/bytecodes.c" map = _PyDict_FromItems( values, 2, values+1, 2, @@ -2027,13 +2039,13 @@ if (map == NULL) goto error; - #line 2031 "Python/generated_cases.c.h" + #line 2043 "Python/generated_cases.c.h" for (int _i = oparg*2; --_i >= 0;) { Py_DECREF(values[_i]); } - #line 1463 "Python/bytecodes.c" + #line 1469 "Python/bytecodes.c" if (map == NULL) { STACK_SHRINK(oparg*2); goto error; } - #line 2037 "Python/generated_cases.c.h" + #line 2049 "Python/generated_cases.c.h" STACK_SHRINK(oparg*2); STACK_GROW(1); stack_pointer[-1] = map; @@ -2041,7 +2053,7 @@ } TARGET(SETUP_ANNOTATIONS) { - #line 1467 "Python/bytecodes.c" + #line 1473 "Python/bytecodes.c" int err; PyObject *ann_dict; if (LOCALS() == NULL) { @@ -2081,7 +2093,7 @@ Py_DECREF(ann_dict); } } - #line 2085 "Python/generated_cases.c.h" + #line 2097 "Python/generated_cases.c.h" DISPATCH(); } @@ -2089,7 +2101,7 @@ PyObject *keys = stack_pointer[-1]; PyObject **values = (stack_pointer - (1 + oparg)); PyObject *map; - #line 1509 "Python/bytecodes.c" + #line 1515 "Python/bytecodes.c" if (!PyTuple_CheckExact(keys) || PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { _PyErr_SetString(tstate, PyExc_SystemError, @@ -2099,14 +2111,14 @@ map = _PyDict_FromItems( &PyTuple_GET_ITEM(keys, 0), 1, values, 1, oparg); - #line 2103 "Python/generated_cases.c.h" + #line 2115 "Python/generated_cases.c.h" for (int _i = oparg; --_i >= 0;) { Py_DECREF(values[_i]); } Py_DECREF(keys); - #line 1519 "Python/bytecodes.c" + #line 1525 "Python/bytecodes.c" if (map == NULL) { STACK_SHRINK(oparg); goto pop_1_error; } - #line 2110 "Python/generated_cases.c.h" + #line 2122 "Python/generated_cases.c.h" STACK_SHRINK(oparg); stack_pointer[-1] = map; DISPATCH(); @@ -2114,7 +2126,7 @@ TARGET(DICT_UPDATE) { PyObject *update = stack_pointer[-1]; - #line 1523 "Python/bytecodes.c" + #line 1529 "Python/bytecodes.c" PyObject *dict = 
PEEK(oparg + 1); // update is still on the stack if (PyDict_Update(dict, update) < 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { @@ -2122,12 +2134,12 @@ "'%.200s' object is not a mapping", Py_TYPE(update)->tp_name); } - #line 2126 "Python/generated_cases.c.h" + #line 2138 "Python/generated_cases.c.h" Py_DECREF(update); - #line 1531 "Python/bytecodes.c" + #line 1537 "Python/bytecodes.c" if (true) goto pop_1_error; } - #line 2131 "Python/generated_cases.c.h" + #line 2143 "Python/generated_cases.c.h" Py_DECREF(update); STACK_SHRINK(1); DISPATCH(); @@ -2135,17 +2147,17 @@ TARGET(DICT_MERGE) { PyObject *update = stack_pointer[-1]; - #line 1537 "Python/bytecodes.c" + #line 1543 "Python/bytecodes.c" PyObject *dict = PEEK(oparg + 1); // update is still on the stack if (_PyDict_MergeEx(dict, update, 2) < 0) { format_kwargs_error(tstate, PEEK(3 + oparg), update); - #line 2144 "Python/generated_cases.c.h" + #line 2156 "Python/generated_cases.c.h" Py_DECREF(update); - #line 1542 "Python/bytecodes.c" + #line 1548 "Python/bytecodes.c" if (true) goto pop_1_error; } - #line 2149 "Python/generated_cases.c.h" + #line 2161 "Python/generated_cases.c.h" Py_DECREF(update); STACK_SHRINK(1); PREDICT(CALL_FUNCTION_EX); @@ -2155,26 +2167,26 @@ TARGET(MAP_ADD) { PyObject *value = stack_pointer[-1]; PyObject *key = stack_pointer[-2]; - #line 1549 "Python/bytecodes.c" + #line 1555 "Python/bytecodes.c" PyObject *dict = PEEK(oparg + 2); // key, value are still on the stack assert(PyDict_CheckExact(dict)); /* dict[key] = value */ // Do not DECREF INPUTS because the function steals the references if (_PyDict_SetItem_Take2((PyDictObject *)dict, key, value) != 0) goto pop_2_error; - #line 2165 "Python/generated_cases.c.h" + #line 2177 "Python/generated_cases.c.h" STACK_SHRINK(2); PREDICT(JUMP_BACKWARD); DISPATCH(); } TARGET(INSTRUMENTED_LOAD_SUPER_ATTR) { - #line 1558 "Python/bytecodes.c" + #line 1564 "Python/bytecodes.c" _PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr; // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we // don't want to specialize instrumented instructions INCREMENT_ADAPTIVE_COUNTER(cache->counter); GO_TO_INSTRUCTION(LOAD_SUPER_ATTR); - #line 2178 "Python/generated_cases.c.h" + #line 2190 "Python/generated_cases.c.h" } TARGET(LOAD_SUPER_ATTR) { @@ -2185,7 +2197,7 @@ PyObject *global_super = stack_pointer[-3]; PyObject *res2 = NULL; PyObject *res; - #line 1571 "Python/bytecodes.c" + #line 1577 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg >> 2); int load_method = oparg & 1; #if ENABLE_SPECIALIZATION @@ -2211,11 +2223,11 @@ // handle any case whose performance we care about PyObject *stack[] = {class, self}; PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL); - #line 2215 "Python/generated_cases.c.h" + #line 2227 "Python/generated_cases.c.h" Py_DECREF(global_super); Py_DECREF(class); Py_DECREF(self); - #line 1597 "Python/bytecodes.c" + #line 1603 "Python/bytecodes.c" if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) { PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING; if (super == NULL) { @@ -2236,7 +2248,7 @@ res = PyObject_GetAttr(super, name); Py_DECREF(super); if (res == NULL) goto pop_3_error; - #line 2240 "Python/generated_cases.c.h" + #line 2252 "Python/generated_cases.c.h" STACK_SHRINK(2); STACK_GROW(((oparg & 1) ? 
1 : 0)); stack_pointer[-1] = res; @@ -2254,7 +2266,7 @@ uint32_t class_version = read_u32(&next_instr[1].cache); uint32_t self_type_version = read_u32(&next_instr[3].cache); PyObject *method = read_obj(&next_instr[5].cache); - #line 1620 "Python/bytecodes.c" + #line 1626 "Python/bytecodes.c" DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); DEOPT_IF(((PyTypeObject *)class)->tp_version_tag != class_version, LOAD_SUPER_ATTR); @@ -2265,7 +2277,7 @@ Py_INCREF(res2); Py_DECREF(global_super); Py_DECREF(class); - #line 2269 "Python/generated_cases.c.h" + #line 2281 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; stack_pointer[-2] = res2; @@ -2279,7 +2291,7 @@ PyObject *owner = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; - #line 1647 "Python/bytecodes.c" + #line 1653 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -2313,9 +2325,9 @@ NULL | meth | arg1 | ... | argN */ - #line 2317 "Python/generated_cases.c.h" + #line 2329 "Python/generated_cases.c.h" Py_DECREF(owner); - #line 1681 "Python/bytecodes.c" + #line 1687 "Python/bytecodes.c" if (meth == NULL) goto pop_1_error; res2 = NULL; res = meth; @@ -2324,12 +2336,12 @@ else { /* Classic, pushes one value. */ res = PyObject_GetAttr(owner, name); - #line 2328 "Python/generated_cases.c.h" + #line 2340 "Python/generated_cases.c.h" Py_DECREF(owner); - #line 1690 "Python/bytecodes.c" + #line 1696 "Python/bytecodes.c" if (res == NULL) goto pop_1_error; } - #line 2333 "Python/generated_cases.c.h" + #line 2345 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -2343,7 +2355,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1695 "Python/bytecodes.c" + #line 1701 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2356,7 +2368,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2360 "Python/generated_cases.c.h" + #line 2372 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2371,7 +2383,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1711 "Python/bytecodes.c" + #line 1717 "Python/bytecodes.c" DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; assert(dict != NULL); @@ -2384,7 +2396,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2388 "Python/generated_cases.c.h" + #line 2400 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2399,7 +2411,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1727 "Python/bytecodes.c" + #line 1733 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2426,7 +2438,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2430 "Python/generated_cases.c.h" + #line 2442 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 
1 : 0)); stack_pointer[-1] = res; @@ -2441,7 +2453,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1757 "Python/bytecodes.c" + #line 1763 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -2451,7 +2463,7 @@ STAT_INC(LOAD_ATTR, hit); Py_INCREF(res); res2 = NULL; - #line 2455 "Python/generated_cases.c.h" + #line 2467 "Python/generated_cases.c.h" Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2466,7 +2478,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 1770 "Python/bytecodes.c" + #line 1776 "Python/bytecodes.c" DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, @@ -2478,7 +2490,7 @@ res = descr; assert(res != NULL); Py_INCREF(res); - #line 2482 "Python/generated_cases.c.h" + #line 2494 "Python/generated_cases.c.h" Py_DECREF(cls); STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; @@ -2492,7 +2504,7 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *fget = read_obj(&next_instr[5].cache); - #line 1785 "Python/bytecodes.c" + #line 1791 "Python/bytecodes.c" DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); @@ -2516,7 +2528,7 @@ JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 2520 "Python/generated_cases.c.h" + #line 2532 "Python/generated_cases.c.h" } TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { @@ -2524,7 +2536,7 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *getattribute = read_obj(&next_instr[5].cache); - #line 1811 "Python/bytecodes.c" + #line 1817 "Python/bytecodes.c" DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); @@ -2550,7 +2562,7 @@ JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 2554 "Python/generated_cases.c.h" + #line 2566 "Python/generated_cases.c.h" } TARGET(STORE_ATTR_INSTANCE_VALUE) { @@ -2558,7 +2570,7 @@ PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1839 "Python/bytecodes.c" + #line 1845 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2576,7 +2588,7 @@ Py_DECREF(old_value); } Py_DECREF(owner); - #line 2580 "Python/generated_cases.c.h" + #line 2592 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2587,7 +2599,7 @@ PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t hint = read_u16(&next_instr[3].cache); - #line 1859 "Python/bytecodes.c" + #line 1865 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2626,7 +2638,7 @@ /* PEP 509 */ dict->ma_version_tag = new_version; Py_DECREF(owner); - #line 2630 "Python/generated_cases.c.h" + #line 2642 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2637,7 +2649,7 @@ PyObject *value = stack_pointer[-2]; 
uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); - #line 1900 "Python/bytecodes.c" + #line 1906 "Python/bytecodes.c" PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); @@ -2647,7 +2659,7 @@ *(PyObject **)addr = value; Py_XDECREF(old_value); Py_DECREF(owner); - #line 2651 "Python/generated_cases.c.h" + #line 2663 "Python/generated_cases.c.h" STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2659,7 +2671,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1919 "Python/bytecodes.c" + #line 1925 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -2672,12 +2684,12 @@ #endif /* ENABLE_SPECIALIZATION */ assert((oparg >> 4) <= Py_GE); res = PyObject_RichCompare(left, right, oparg>>4); - #line 2676 "Python/generated_cases.c.h" + #line 2688 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1932 "Python/bytecodes.c" + #line 1938 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 2681 "Python/generated_cases.c.h" + #line 2693 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -2688,7 +2700,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1936 "Python/bytecodes.c" + #line 1942 "Python/bytecodes.c" DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); STAT_INC(COMPARE_OP, hit); @@ -2700,7 +2712,7 @@ _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc); res = (sign_ish & oparg) ? Py_True : Py_False; Py_INCREF(res); - #line 2704 "Python/generated_cases.c.h" + #line 2716 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -2711,7 +2723,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1951 "Python/bytecodes.c" + #line 1957 "Python/bytecodes.c" DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP); @@ -2727,7 +2739,7 @@ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); res = (sign_ish & oparg) ? Py_True : Py_False; Py_INCREF(res); - #line 2731 "Python/generated_cases.c.h" + #line 2743 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -2738,7 +2750,7 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *res; - #line 1970 "Python/bytecodes.c" + #line 1976 "Python/bytecodes.c" DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); STAT_INC(COMPARE_OP, hit); @@ -2751,7 +2763,7 @@ assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS); res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? 
Py_True : Py_False; Py_INCREF(res); - #line 2755 "Python/generated_cases.c.h" + #line 2767 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -2762,14 +2774,14 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 1985 "Python/bytecodes.c" + #line 1991 "Python/bytecodes.c" int res = Py_Is(left, right) ^ oparg; - #line 2768 "Python/generated_cases.c.h" + #line 2780 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1987 "Python/bytecodes.c" + #line 1993 "Python/bytecodes.c" b = Py_NewRef(res ? Py_True : Py_False); - #line 2773 "Python/generated_cases.c.h" + #line 2785 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = b; DISPATCH(); @@ -2779,15 +2791,15 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 1991 "Python/bytecodes.c" + #line 1997 "Python/bytecodes.c" int res = PySequence_Contains(right, left); - #line 2785 "Python/generated_cases.c.h" + #line 2797 "Python/generated_cases.c.h" Py_DECREF(left); Py_DECREF(right); - #line 1993 "Python/bytecodes.c" + #line 1999 "Python/bytecodes.c" if (res < 0) goto pop_2_error; b = Py_NewRef((res^oparg) ? Py_True : Py_False); - #line 2791 "Python/generated_cases.c.h" + #line 2803 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = b; DISPATCH(); @@ -2798,12 +2810,12 @@ PyObject *exc_value = stack_pointer[-2]; PyObject *rest; PyObject *match; - #line 1998 "Python/bytecodes.c" + #line 2004 "Python/bytecodes.c" if (check_except_star_type_valid(tstate, match_type) < 0) { - #line 2804 "Python/generated_cases.c.h" + #line 2816 "Python/generated_cases.c.h" Py_DECREF(exc_value); Py_DECREF(match_type); - #line 2000 "Python/bytecodes.c" + #line 2006 "Python/bytecodes.c" if (true) goto pop_2_error; } @@ -2811,10 +2823,10 @@ rest = NULL; int res = exception_group_match(exc_value, match_type, &match, &rest); - #line 2815 "Python/generated_cases.c.h" + #line 2827 "Python/generated_cases.c.h" Py_DECREF(exc_value); Py_DECREF(match_type); - #line 2008 "Python/bytecodes.c" + #line 2014 "Python/bytecodes.c" if (res < 0) goto pop_2_error; assert((match == NULL) == (rest == NULL)); @@ -2823,7 +2835,7 @@ if (!Py_IsNone(match)) { PyErr_SetHandledException(match); } - #line 2827 "Python/generated_cases.c.h" + #line 2839 "Python/generated_cases.c.h" stack_pointer[-1] = match; stack_pointer[-2] = rest; DISPATCH(); @@ -2833,21 +2845,21 @@ PyObject *right = stack_pointer[-1]; PyObject *left = stack_pointer[-2]; PyObject *b; - #line 2019 "Python/bytecodes.c" + #line 2025 "Python/bytecodes.c" assert(PyExceptionInstance_Check(left)); if (check_except_type_valid(tstate, right) < 0) { - #line 2840 "Python/generated_cases.c.h" + #line 2852 "Python/generated_cases.c.h" Py_DECREF(right); - #line 2022 "Python/bytecodes.c" + #line 2028 "Python/bytecodes.c" if (true) goto pop_1_error; } int res = PyErr_GivenExceptionMatches(left, right); - #line 2847 "Python/generated_cases.c.h" + #line 2859 "Python/generated_cases.c.h" Py_DECREF(right); - #line 2027 "Python/bytecodes.c" + #line 2033 "Python/bytecodes.c" b = Py_NewRef(res ? 
Py_True : Py_False); - #line 2851 "Python/generated_cases.c.h" + #line 2863 "Python/generated_cases.c.h" stack_pointer[-1] = b; DISPATCH(); } @@ -2856,15 +2868,15 @@ PyObject *fromlist = stack_pointer[-1]; PyObject *level = stack_pointer[-2]; PyObject *res; - #line 2031 "Python/bytecodes.c" + #line 2037 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); res = import_name(tstate, frame, name, fromlist, level); - #line 2863 "Python/generated_cases.c.h" + #line 2875 "Python/generated_cases.c.h" Py_DECREF(level); Py_DECREF(fromlist); - #line 2034 "Python/bytecodes.c" + #line 2040 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 2868 "Python/generated_cases.c.h" + #line 2880 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; DISPATCH(); @@ -2873,29 +2885,29 @@ TARGET(IMPORT_FROM) { PyObject *from = stack_pointer[-1]; PyObject *res; - #line 2038 "Python/bytecodes.c" + #line 2044 "Python/bytecodes.c" PyObject *name = GETITEM(frame->f_code->co_names, oparg); res = import_from(tstate, from, name); if (res == NULL) goto error; - #line 2881 "Python/generated_cases.c.h" + #line 2893 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; DISPATCH(); } TARGET(JUMP_FORWARD) { - #line 2044 "Python/bytecodes.c" + #line 2050 "Python/bytecodes.c" JUMPBY(oparg); - #line 2890 "Python/generated_cases.c.h" + #line 2902 "Python/generated_cases.c.h" DISPATCH(); } TARGET(JUMP_BACKWARD) { PREDICTED(JUMP_BACKWARD); - #line 2048 "Python/bytecodes.c" + #line 2054 "Python/bytecodes.c" assert(oparg < INSTR_OFFSET()); JUMPBY(-oparg); - #line 2899 "Python/generated_cases.c.h" + #line 2911 "Python/generated_cases.c.h" CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -2903,7 +2915,7 @@ TARGET(POP_JUMP_IF_FALSE) { PREDICTED(POP_JUMP_IF_FALSE); PyObject *cond = stack_pointer[-1]; - #line 2054 "Python/bytecodes.c" + #line 2060 "Python/bytecodes.c" if (Py_IsTrue(cond)) { _Py_DECREF_NO_DEALLOC(cond); } @@ -2913,9 +2925,9 @@ } else { int err = PyObject_IsTrue(cond); - #line 2917 "Python/generated_cases.c.h" + #line 2929 "Python/generated_cases.c.h" Py_DECREF(cond); - #line 2064 "Python/bytecodes.c" + #line 2070 "Python/bytecodes.c" if (err == 0) { JUMPBY(oparg); } @@ -2923,14 +2935,14 @@ if (err < 0) goto pop_1_error; } } - #line 2927 "Python/generated_cases.c.h" + #line 2939 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_TRUE) { PyObject *cond = stack_pointer[-1]; - #line 2074 "Python/bytecodes.c" + #line 2080 "Python/bytecodes.c" if (Py_IsFalse(cond)) { _Py_DECREF_NO_DEALLOC(cond); } @@ -2940,9 +2952,9 @@ } else { int err = PyObject_IsTrue(cond); - #line 2944 "Python/generated_cases.c.h" + #line 2956 "Python/generated_cases.c.h" Py_DECREF(cond); - #line 2084 "Python/bytecodes.c" + #line 2090 "Python/bytecodes.c" if (err > 0) { JUMPBY(oparg); } @@ -2950,67 +2962,67 @@ if (err < 0) goto pop_1_error; } } - #line 2954 "Python/generated_cases.c.h" + #line 2966 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_NOT_NONE) { PyObject *value = stack_pointer[-1]; - #line 2094 "Python/bytecodes.c" + #line 2100 "Python/bytecodes.c" if (!Py_IsNone(value)) { - #line 2963 "Python/generated_cases.c.h" + #line 2975 "Python/generated_cases.c.h" Py_DECREF(value); - #line 2096 "Python/bytecodes.c" + #line 2102 "Python/bytecodes.c" JUMPBY(oparg); } else { _Py_DECREF_NO_DEALLOC(value); } - #line 2971 "Python/generated_cases.c.h" + #line 2983 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(POP_JUMP_IF_NONE) 
{ PyObject *value = stack_pointer[-1]; - #line 2104 "Python/bytecodes.c" + #line 2110 "Python/bytecodes.c" if (Py_IsNone(value)) { _Py_DECREF_NO_DEALLOC(value); JUMPBY(oparg); } else { - #line 2984 "Python/generated_cases.c.h" + #line 2996 "Python/generated_cases.c.h" Py_DECREF(value); - #line 2110 "Python/bytecodes.c" + #line 2116 "Python/bytecodes.c" } - #line 2988 "Python/generated_cases.c.h" + #line 3000 "Python/generated_cases.c.h" STACK_SHRINK(1); DISPATCH(); } TARGET(JUMP_BACKWARD_NO_INTERRUPT) { - #line 2114 "Python/bytecodes.c" + #line 2120 "Python/bytecodes.c" /* This bytecode is used in the `yield from` or `await` loop. * If there is an interrupt, we want it handled in the innermost * generator or coroutine, so we deliberately do not check it here. * (see bpo-30039). */ JUMPBY(-oparg); - #line 3001 "Python/generated_cases.c.h" + #line 3013 "Python/generated_cases.c.h" DISPATCH(); } TARGET(GET_LEN) { PyObject *obj = stack_pointer[-1]; PyObject *len_o; - #line 2123 "Python/bytecodes.c" + #line 2129 "Python/bytecodes.c" // PUSH(len(TOS)) Py_ssize_t len_i = PyObject_Length(obj); if (len_i < 0) goto error; len_o = PyLong_FromSsize_t(len_i); if (len_o == NULL) goto error; - #line 3014 "Python/generated_cases.c.h" + #line 3026 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = len_o; DISPATCH(); @@ -3021,16 +3033,16 @@ PyObject *type = stack_pointer[-2]; PyObject *subject = stack_pointer[-3]; PyObject *attrs; - #line 2131 "Python/bytecodes.c" + #line 2137 "Python/bytecodes.c" // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or // None on failure. assert(PyTuple_CheckExact(names)); attrs = match_class(tstate, subject, type, oparg, names); - #line 3030 "Python/generated_cases.c.h" + #line 3042 "Python/generated_cases.c.h" Py_DECREF(subject); Py_DECREF(type); Py_DECREF(names); - #line 2136 "Python/bytecodes.c" + #line 2142 "Python/bytecodes.c" if (attrs) { assert(PyTuple_CheckExact(attrs)); // Success! } @@ -3038,7 +3050,7 @@ if (_PyErr_Occurred(tstate)) goto pop_3_error; attrs = Py_NewRef(Py_None); // Failure! } - #line 3042 "Python/generated_cases.c.h" + #line 3054 "Python/generated_cases.c.h" STACK_SHRINK(2); stack_pointer[-1] = attrs; DISPATCH(); @@ -3047,10 +3059,10 @@ TARGET(MATCH_MAPPING) { PyObject *subject = stack_pointer[-1]; PyObject *res; - #line 2146 "Python/bytecodes.c" + #line 2152 "Python/bytecodes.c" int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; res = Py_NewRef(match ? Py_True : Py_False); - #line 3054 "Python/generated_cases.c.h" + #line 3066 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; PREDICT(POP_JUMP_IF_FALSE); @@ -3060,10 +3072,10 @@ TARGET(MATCH_SEQUENCE) { PyObject *subject = stack_pointer[-1]; PyObject *res; - #line 2152 "Python/bytecodes.c" + #line 2158 "Python/bytecodes.c" int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; res = Py_NewRef(match ? Py_True : Py_False); - #line 3067 "Python/generated_cases.c.h" + #line 3079 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; PREDICT(POP_JUMP_IF_FALSE); @@ -3074,11 +3086,11 @@ PyObject *keys = stack_pointer[-1]; PyObject *subject = stack_pointer[-2]; PyObject *values_or_none; - #line 2158 "Python/bytecodes.c" + #line 2164 "Python/bytecodes.c" // On successful match, PUSH(values). Otherwise, PUSH(None). 
values_or_none = match_keys(tstate, subject, keys); if (values_or_none == NULL) goto error; - #line 3082 "Python/generated_cases.c.h" + #line 3094 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = values_or_none; DISPATCH(); @@ -3087,14 +3099,14 @@ TARGET(GET_ITER) { PyObject *iterable = stack_pointer[-1]; PyObject *iter; - #line 2164 "Python/bytecodes.c" + #line 2170 "Python/bytecodes.c" /* before: [obj]; after [getiter(obj)] */ iter = PyObject_GetIter(iterable); - #line 3094 "Python/generated_cases.c.h" + #line 3106 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 2167 "Python/bytecodes.c" + #line 2173 "Python/bytecodes.c" if (iter == NULL) goto pop_1_error; - #line 3098 "Python/generated_cases.c.h" + #line 3110 "Python/generated_cases.c.h" stack_pointer[-1] = iter; DISPATCH(); } @@ -3102,7 +3114,7 @@ TARGET(GET_YIELD_FROM_ITER) { PyObject *iterable = stack_pointer[-1]; PyObject *iter; - #line 2171 "Python/bytecodes.c" + #line 2177 "Python/bytecodes.c" /* before: [obj]; after [getiter(obj)] */ if (PyCoro_CheckExact(iterable)) { /* `iterable` is a coroutine */ @@ -3125,11 +3137,11 @@ if (iter == NULL) { goto error; } - #line 3129 "Python/generated_cases.c.h" + #line 3141 "Python/generated_cases.c.h" Py_DECREF(iterable); - #line 2194 "Python/bytecodes.c" + #line 2200 "Python/bytecodes.c" } - #line 3133 "Python/generated_cases.c.h" + #line 3145 "Python/generated_cases.c.h" stack_pointer[-1] = iter; PREDICT(LOAD_CONST); DISPATCH(); @@ -3140,7 +3152,7 @@ static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size"); PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2213 "Python/bytecodes.c" + #line 2219 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyForIterCache *cache = (_PyForIterCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -3171,7 +3183,7 @@ DISPATCH(); } // Common case: no jump, leave it to the code generator - #line 3175 "Python/generated_cases.c.h" + #line 3187 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3179,7 +3191,7 @@ } TARGET(INSTRUMENTED_FOR_ITER) { - #line 2246 "Python/bytecodes.c" + #line 2252 "Python/bytecodes.c" _Py_CODEUNIT *here = next_instr-1; _Py_CODEUNIT *target; PyObject *iter = TOP(); @@ -3205,14 +3217,14 @@ target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1; } INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH); - #line 3209 "Python/generated_cases.c.h" + #line 3221 "Python/generated_cases.c.h" DISPATCH(); } TARGET(FOR_ITER_LIST) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2274 "Python/bytecodes.c" + #line 2280 "Python/bytecodes.c" DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER); _PyListIterObject *it = (_PyListIterObject *)iter; STAT_INC(FOR_ITER, hit); @@ -3232,7 +3244,7 @@ DISPATCH(); end_for_iter_list: // Common case: no jump, leave it to the code generator - #line 3236 "Python/generated_cases.c.h" + #line 3248 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3242,7 +3254,7 @@ TARGET(FOR_ITER_TUPLE) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2296 "Python/bytecodes.c" + #line 2302 "Python/bytecodes.c" _PyTupleIterObject *it = (_PyTupleIterObject *)iter; DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER); STAT_INC(FOR_ITER, hit); @@ -3262,7 +3274,7 @@ DISPATCH(); end_for_iter_tuple: // Common case: no jump, leave it to the code generator - #line 3266 "Python/generated_cases.c.h" + #line 3278 "Python/generated_cases.c.h" STACK_GROW(1); 
stack_pointer[-1] = next; next_instr += 1; @@ -3272,7 +3284,7 @@ TARGET(FOR_ITER_RANGE) { PyObject *iter = stack_pointer[-1]; PyObject *next; - #line 2318 "Python/bytecodes.c" + #line 2324 "Python/bytecodes.c" _PyRangeIterObject *r = (_PyRangeIterObject *)iter; DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); STAT_INC(FOR_ITER, hit); @@ -3290,7 +3302,7 @@ if (next == NULL) { goto error; } - #line 3294 "Python/generated_cases.c.h" + #line 3306 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = next; next_instr += 1; @@ -3299,7 +3311,7 @@ TARGET(FOR_ITER_GEN) { PyObject *iter = stack_pointer[-1]; - #line 2338 "Python/bytecodes.c" + #line 2344 "Python/bytecodes.c" PyGenObject *gen = (PyGenObject *)iter; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER); @@ -3314,14 +3326,14 @@ assert(next_instr[oparg].op.code == END_FOR || next_instr[oparg].op.code == INSTRUMENTED_END_FOR); DISPATCH_INLINED(gen_frame); - #line 3318 "Python/generated_cases.c.h" + #line 3330 "Python/generated_cases.c.h" } TARGET(BEFORE_ASYNC_WITH) { PyObject *mgr = stack_pointer[-1]; PyObject *exit; PyObject *res; - #line 2355 "Python/bytecodes.c" + #line 2361 "Python/bytecodes.c" PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); if (enter == NULL) { if (!_PyErr_Occurred(tstate)) { @@ -3344,16 +3356,16 @@ Py_DECREF(enter); goto error; } - #line 3348 "Python/generated_cases.c.h" + #line 3360 "Python/generated_cases.c.h" Py_DECREF(mgr); - #line 2378 "Python/bytecodes.c" + #line 2384 "Python/bytecodes.c" res = _PyObject_CallNoArgs(enter); Py_DECREF(enter); if (res == NULL) { Py_DECREF(exit); if (true) goto pop_1_error; } - #line 3357 "Python/generated_cases.c.h" + #line 3369 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; stack_pointer[-2] = exit; @@ -3365,7 +3377,7 @@ PyObject *mgr = stack_pointer[-1]; PyObject *exit; PyObject *res; - #line 2388 "Python/bytecodes.c" + #line 2394 "Python/bytecodes.c" /* pop the context manager, push its __exit__ and the * value returned from calling its __enter__ */ @@ -3391,16 +3403,16 @@ Py_DECREF(enter); goto error; } - #line 3395 "Python/generated_cases.c.h" + #line 3407 "Python/generated_cases.c.h" Py_DECREF(mgr); - #line 2414 "Python/bytecodes.c" + #line 2420 "Python/bytecodes.c" res = _PyObject_CallNoArgs(enter); Py_DECREF(enter); if (res == NULL) { Py_DECREF(exit); if (true) goto pop_1_error; } - #line 3404 "Python/generated_cases.c.h" + #line 3416 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; stack_pointer[-2] = exit; @@ -3412,7 +3424,7 @@ PyObject *lasti = stack_pointer[-3]; PyObject *exit_func = stack_pointer[-4]; PyObject *res; - #line 2423 "Python/bytecodes.c" + #line 2429 "Python/bytecodes.c" /* At the top of the stack are 4 values: - val: TOP = exc_info() - unused: SECOND = previous exception @@ -3433,7 +3445,7 @@ res = PyObject_Vectorcall(exit_func, stack + 1, 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); if (res == NULL) goto error; - #line 3437 "Python/generated_cases.c.h" + #line 3449 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = res; DISPATCH(); @@ -3442,7 +3454,7 @@ TARGET(PUSH_EXC_INFO) { PyObject *new_exc = stack_pointer[-1]; PyObject *prev_exc; - #line 2446 "Python/bytecodes.c" + #line 2452 "Python/bytecodes.c" _PyErr_StackItem *exc_info = tstate->exc_info; if (exc_info->exc_value != NULL) { prev_exc = exc_info->exc_value; @@ -3452,7 +3464,7 @@ } assert(PyExceptionInstance_Check(new_exc)); exc_info->exc_value = 
Py_NewRef(new_exc); - #line 3456 "Python/generated_cases.c.h" + #line 3468 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = new_exc; stack_pointer[-2] = prev_exc; @@ -3466,7 +3478,7 @@ uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t keys_version = read_u32(&next_instr[3].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2458 "Python/bytecodes.c" + #line 2464 "Python/bytecodes.c" /* Cached method object */ PyTypeObject *self_cls = Py_TYPE(self); assert(type_version != 0); @@ -3483,7 +3495,7 @@ assert(_PyType_HasFeature(Py_TYPE(res2), Py_TPFLAGS_METHOD_DESCRIPTOR)); res = self; assert(oparg & 1); - #line 3487 "Python/generated_cases.c.h" + #line 3499 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -3497,7 +3509,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2477 "Python/bytecodes.c" + #line 2483 "Python/bytecodes.c" PyTypeObject *self_cls = Py_TYPE(self); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); assert(self_cls->tp_dictoffset == 0); @@ -3507,7 +3519,7 @@ res2 = Py_NewRef(descr); res = self; assert(oparg & 1); - #line 3511 "Python/generated_cases.c.h" + #line 3523 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } @@ -3521,7 +3533,7 @@ PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - #line 2489 "Python/bytecodes.c" + #line 2495 "Python/bytecodes.c" PyTypeObject *self_cls = Py_TYPE(self); DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); Py_ssize_t dictoffset = self_cls->tp_dictoffset; @@ -3535,7 +3547,7 @@ res2 = Py_NewRef(descr); res = self; assert(oparg & 1); - #line 3539 "Python/generated_cases.c.h" + #line 3551 "Python/generated_cases.c.h" STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1] = res; if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 
1 : 0))] = res2; } @@ -3544,16 +3556,16 @@ } TARGET(KW_NAMES) { - #line 2505 "Python/bytecodes.c" + #line 2511 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg < PyTuple_GET_SIZE(frame->f_code->co_consts)); kwnames = GETITEM(frame->f_code->co_consts, oparg); - #line 3552 "Python/generated_cases.c.h" + #line 3564 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_CALL) { - #line 2511 "Python/bytecodes.c" + #line 2517 "Python/bytecodes.c" int is_meth = PEEK(oparg+2) != NULL; int total_args = oparg + is_meth; PyObject *function = PEEK(total_args + 1); @@ -3566,7 +3578,7 @@ _PyCallCache *cache = (_PyCallCache *)next_instr; INCREMENT_ADAPTIVE_COUNTER(cache->counter); GO_TO_INSTRUCTION(CALL); - #line 3570 "Python/generated_cases.c.h" + #line 3582 "Python/generated_cases.c.h" } TARGET(CALL) { @@ -3576,7 +3588,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2556 "Python/bytecodes.c" + #line 2562 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -3658,7 +3670,7 @@ Py_DECREF(args[i]); } if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3662 "Python/generated_cases.c.h" + #line 3674 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3670,7 +3682,7 @@ TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; - #line 2644 "Python/bytecodes.c" + #line 2650 "Python/bytecodes.c" DEOPT_IF(method != NULL, CALL); DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL); STAT_INC(CALL, hit); @@ -3680,7 +3692,7 @@ PEEK(oparg + 2) = Py_NewRef(meth); // method Py_DECREF(callable); GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); - #line 3684 "Python/generated_cases.c.h" + #line 3696 "Python/generated_cases.c.h" } TARGET(CALL_PY_EXACT_ARGS) { @@ -3689,7 +3701,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; uint32_t func_version = read_u32(&next_instr[1].cache); - #line 2656 "Python/bytecodes.c" + #line 2662 "Python/bytecodes.c" assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); int is_meth = method != NULL; @@ -3715,7 +3727,7 @@ JUMPBY(INLINE_CACHE_ENTRIES_CALL); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 3719 "Python/generated_cases.c.h" + #line 3731 "Python/generated_cases.c.h" } TARGET(CALL_PY_WITH_DEFAULTS) { @@ -3723,7 +3735,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; uint32_t func_version = read_u32(&next_instr[1].cache); - #line 2684 "Python/bytecodes.c" + #line 2690 "Python/bytecodes.c" assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); int is_meth = method != NULL; @@ -3759,7 +3771,7 @@ JUMPBY(INLINE_CACHE_ENTRIES_CALL); frame->return_offset = 0; DISPATCH_INLINED(new_frame); - #line 3763 "Python/generated_cases.c.h" + #line 3775 "Python/generated_cases.c.h" } TARGET(CALL_NO_KW_TYPE_1) { @@ -3767,7 +3779,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2722 "Python/bytecodes.c" + #line 2728 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3777,7 +3789,7 @@ res = Py_NewRef(Py_TYPE(obj)); Py_DECREF(obj); Py_DECREF(&PyType_Type); // I.e., callable - #line 3781 "Python/generated_cases.c.h" + #line 3793 "Python/generated_cases.c.h" STACK_SHRINK(oparg); 
STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3790,7 +3802,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2734 "Python/bytecodes.c" + #line 2740 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3801,7 +3813,7 @@ Py_DECREF(arg); Py_DECREF(&PyUnicode_Type); // I.e., callable if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3805 "Python/generated_cases.c.h" + #line 3817 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3815,7 +3827,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2748 "Python/bytecodes.c" + #line 2754 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3826,7 +3838,7 @@ Py_DECREF(arg); Py_DECREF(&PyTuple_Type); // I.e., tuple if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3830 "Python/generated_cases.c.h" + #line 3842 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3840,7 +3852,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2762 "Python/bytecodes.c" + #line 2768 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -3862,7 +3874,7 @@ } Py_DECREF(tp); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3866 "Python/generated_cases.c.h" + #line 3878 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3876,7 +3888,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2787 "Python/bytecodes.c" + #line 2793 "Python/bytecodes.c" /* Builtin METH_O functions */ assert(kwnames == NULL); int is_meth = method != NULL; @@ -3904,7 +3916,7 @@ Py_DECREF(arg); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 3908 "Python/generated_cases.c.h" + #line 3920 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3918,7 +3930,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2818 "Python/bytecodes.c" + #line 2824 "Python/bytecodes.c" /* Builtin METH_FASTCALL functions, without keywords */ assert(kwnames == NULL); int is_meth = method != NULL; @@ -3950,7 +3962,7 @@ 'invalid'). In those cases an exception is set, so we must handle it. 
*/ - #line 3954 "Python/generated_cases.c.h" + #line 3966 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -3964,7 +3976,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2853 "Python/bytecodes.c" + #line 2859 "Python/bytecodes.c" /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ int is_meth = method != NULL; int total_args = oparg; @@ -3996,7 +4008,7 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4000 "Python/generated_cases.c.h" + #line 4012 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4010,7 +4022,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2888 "Python/bytecodes.c" + #line 2894 "Python/bytecodes.c" assert(kwnames == NULL); /* len(o) */ int is_meth = method != NULL; @@ -4035,7 +4047,7 @@ Py_DECREF(callable); Py_DECREF(arg); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4039 "Python/generated_cases.c.h" + #line 4051 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4048,7 +4060,7 @@ PyObject *callable = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2915 "Python/bytecodes.c" + #line 2921 "Python/bytecodes.c" assert(kwnames == NULL); /* isinstance(o, o2) */ int is_meth = method != NULL; @@ -4075,7 +4087,7 @@ Py_DECREF(cls); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4079 "Python/generated_cases.c.h" + #line 4091 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4087,7 +4099,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *self = stack_pointer[-(1 + oparg)]; PyObject *method = stack_pointer[-(2 + oparg)]; - #line 2945 "Python/bytecodes.c" + #line 2951 "Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 1); assert(method != NULL); @@ -4105,14 +4117,14 @@ JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1); assert(next_instr[-1].op.code == POP_TOP); DISPATCH(); - #line 4109 "Python/generated_cases.c.h" + #line 4121 "Python/generated_cases.c.h" } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2965 "Python/bytecodes.c" + #line 2971 "Python/bytecodes.c" assert(kwnames == NULL); int is_meth = method != NULL; int total_args = oparg; @@ -4143,7 +4155,7 @@ Py_DECREF(arg); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4147 "Python/generated_cases.c.h" + #line 4159 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4156,7 +4168,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 2999 "Python/bytecodes.c" + #line 3005 "Python/bytecodes.c" int is_meth = method != NULL; int total_args = oparg; if (is_meth) { @@ -4185,7 +4197,7 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4189 "Python/generated_cases.c.h" + #line 4201 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4198,7 +4210,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 3031 "Python/bytecodes.c" + #line 3037 
"Python/bytecodes.c" assert(kwnames == NULL); assert(oparg == 0 || oparg == 1); int is_meth = method != NULL; @@ -4227,7 +4239,7 @@ Py_DECREF(self); Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4231 "Python/generated_cases.c.h" + #line 4243 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4240,7 +4252,7 @@ PyObject **args = (stack_pointer - oparg); PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; - #line 3063 "Python/bytecodes.c" + #line 3069 "Python/bytecodes.c" assert(kwnames == NULL); int is_meth = method != NULL; int total_args = oparg; @@ -4268,7 +4280,7 @@ } Py_DECREF(callable); if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } - #line 4272 "Python/generated_cases.c.h" + #line 4284 "Python/generated_cases.c.h" STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -4278,9 +4290,9 @@ } TARGET(INSTRUMENTED_CALL_FUNCTION_EX) { - #line 3094 "Python/bytecodes.c" + #line 3100 "Python/bytecodes.c" GO_TO_INSTRUCTION(CALL_FUNCTION_EX); - #line 4284 "Python/generated_cases.c.h" + #line 4296 "Python/generated_cases.c.h" } TARGET(CALL_FUNCTION_EX) { @@ -4289,7 +4301,7 @@ PyObject *callargs = stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))]; PyObject *func = stack_pointer[-(2 + ((oparg & 1) ? 1 : 0))]; PyObject *result; - #line 3098 "Python/bytecodes.c" + #line 3104 "Python/bytecodes.c" // DICT_MERGE is called before this opcode if there are kwargs. // It converts all dict subtypes in kwargs into regular dicts. assert(kwargs == NULL || PyDict_CheckExact(kwargs)); @@ -4351,14 +4363,14 @@ } result = PyObject_Call(func, callargs, kwargs); } - #line 4355 "Python/generated_cases.c.h" + #line 4367 "Python/generated_cases.c.h" Py_DECREF(func); Py_DECREF(callargs); Py_XDECREF(kwargs); - #line 3160 "Python/bytecodes.c" + #line 3166 "Python/bytecodes.c" assert(PEEK(3 + (oparg & 1)) == NULL); if (result == NULL) { STACK_SHRINK(((oparg & 1) ? 1 : 0)); goto pop_3_error; } - #line 4362 "Python/generated_cases.c.h" + #line 4374 "Python/generated_cases.c.h" STACK_SHRINK(((oparg & 1) ? 1 : 0)); STACK_SHRINK(2); stack_pointer[-1] = result; @@ -4373,7 +4385,7 @@ PyObject *kwdefaults = (oparg & 0x02) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0))] : NULL; PyObject *defaults = (oparg & 0x01) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x01) ? 1 : 0))] : NULL; PyObject *func; - #line 3170 "Python/bytecodes.c" + #line 3176 "Python/bytecodes.c" PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); @@ -4402,14 +4414,14 @@ func_obj->func_version = ((PyCodeObject *)codeobj)->co_version; func = (PyObject *)func_obj; - #line 4406 "Python/generated_cases.c.h" + #line 4418 "Python/generated_cases.c.h" STACK_SHRINK(((oparg & 0x01) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x08) ? 
1 : 0)); stack_pointer[-1] = func; DISPATCH(); } TARGET(RETURN_GENERATOR) { - #line 3201 "Python/bytecodes.c" + #line 3207 "Python/bytecodes.c" assert(PyFunction_Check(frame->f_funcobj)); PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); @@ -4430,7 +4442,7 @@ frame = cframe.current_frame = prev; _PyFrame_StackPush(frame, (PyObject *)gen); goto resume_frame; - #line 4434 "Python/generated_cases.c.h" + #line 4446 "Python/generated_cases.c.h" } TARGET(BUILD_SLICE) { @@ -4438,15 +4450,15 @@ PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))]; PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))]; PyObject *slice; - #line 3224 "Python/bytecodes.c" + #line 3230 "Python/bytecodes.c" slice = PySlice_New(start, stop, step); - #line 4444 "Python/generated_cases.c.h" + #line 4456 "Python/generated_cases.c.h" Py_DECREF(start); Py_DECREF(stop); Py_XDECREF(step); - #line 3226 "Python/bytecodes.c" + #line 3232 "Python/bytecodes.c" if (slice == NULL) { STACK_SHRINK(((oparg == 3) ? 1 : 0)); goto pop_2_error; } - #line 4450 "Python/generated_cases.c.h" + #line 4462 "Python/generated_cases.c.h" STACK_SHRINK(((oparg == 3) ? 1 : 0)); STACK_SHRINK(1); stack_pointer[-1] = slice; @@ -4457,7 +4469,7 @@ PyObject *fmt_spec = ((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? stack_pointer[-((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))] : NULL; PyObject *value = stack_pointer[-(1 + (((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))]; PyObject *result; - #line 3230 "Python/bytecodes.c" + #line 3236 "Python/bytecodes.c" /* Handles f-string value formatting. */ PyObject *(*conv_fn)(PyObject *); int which_conversion = oparg & FVC_MASK; @@ -4492,7 +4504,7 @@ Py_DECREF(value); Py_XDECREF(fmt_spec); if (result == NULL) { STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); goto pop_1_error; } - #line 4496 "Python/generated_cases.c.h" + #line 4508 "Python/generated_cases.c.h" STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 
1 : 0)); stack_pointer[-1] = result; DISPATCH(); @@ -4501,10 +4513,10 @@ TARGET(COPY) { PyObject *bottom = stack_pointer[-(1 + (oparg-1))]; PyObject *top; - #line 3267 "Python/bytecodes.c" + #line 3273 "Python/bytecodes.c" assert(oparg > 0); top = Py_NewRef(bottom); - #line 4508 "Python/generated_cases.c.h" + #line 4520 "Python/generated_cases.c.h" STACK_GROW(1); stack_pointer[-1] = top; DISPATCH(); @@ -4516,7 +4528,7 @@ PyObject *rhs = stack_pointer[-1]; PyObject *lhs = stack_pointer[-2]; PyObject *res; - #line 3272 "Python/bytecodes.c" + #line 3278 "Python/bytecodes.c" #if ENABLE_SPECIALIZATION _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -4531,12 +4543,12 @@ assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); assert(binary_ops[oparg]); res = binary_ops[oparg](lhs, rhs); - #line 4535 "Python/generated_cases.c.h" + #line 4547 "Python/generated_cases.c.h" Py_DECREF(lhs); Py_DECREF(rhs); - #line 3287 "Python/bytecodes.c" + #line 3293 "Python/bytecodes.c" if (res == NULL) goto pop_2_error; - #line 4540 "Python/generated_cases.c.h" + #line 4552 "Python/generated_cases.c.h" STACK_SHRINK(1); stack_pointer[-1] = res; next_instr += 1; @@ -4546,16 +4558,16 @@ TARGET(SWAP) { PyObject *top = stack_pointer[-1]; PyObject *bottom = stack_pointer[-(2 + (oparg-2))]; - #line 3292 "Python/bytecodes.c" + #line 3298 "Python/bytecodes.c" assert(oparg >= 2); - #line 4552 "Python/generated_cases.c.h" + #line 4564 "Python/generated_cases.c.h" stack_pointer[-1] = bottom; stack_pointer[-(2 + (oparg-2))] = top; DISPATCH(); } TARGET(INSTRUMENTED_LINE) { - #line 3296 "Python/bytecodes.c" + #line 3302 "Python/bytecodes.c" _Py_CODEUNIT *here = next_instr-1; _PyFrame_SetStackPointer(frame, stack_pointer); int original_opcode = _Py_call_instrumentation_line( @@ -4575,11 +4587,11 @@ } opcode = original_opcode; DISPATCH_GOTO(); - #line 4579 "Python/generated_cases.c.h" + #line 4591 "Python/generated_cases.c.h" } TARGET(INSTRUMENTED_INSTRUCTION) { - #line 3318 "Python/bytecodes.c" + #line 3324 "Python/bytecodes.c" int next_opcode = _Py_call_instrumentation_instruction( tstate, frame, next_instr-1); if (next_opcode < 0) goto error; @@ -4591,26 +4603,26 @@ assert(next_opcode > 0 && next_opcode < 256); opcode = next_opcode; DISPATCH_GOTO(); - #line 4595 "Python/generated_cases.c.h" + #line 4607 "Python/generated_cases.c.h" } TARGET(INSTRUMENTED_JUMP_FORWARD) { - #line 3332 "Python/bytecodes.c" + #line 3338 "Python/bytecodes.c" INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP); - #line 4601 "Python/generated_cases.c.h" + #line 4613 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_JUMP_BACKWARD) { - #line 3336 "Python/bytecodes.c" + #line 3342 "Python/bytecodes.c" INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP); - #line 4608 "Python/generated_cases.c.h" + #line 4620 "Python/generated_cases.c.h" CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(INSTRUMENTED_POP_JUMP_IF_TRUE) { - #line 3341 "Python/bytecodes.c" + #line 3347 "Python/bytecodes.c" PyObject *cond = POP(); int err = PyObject_IsTrue(cond); Py_DECREF(cond); @@ -4619,12 +4631,12 @@ assert(err == 0 || err == 1); int offset = err*oparg; INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); - #line 4623 "Python/generated_cases.c.h" + #line 4635 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_POP_JUMP_IF_FALSE) { - #line 3352 "Python/bytecodes.c" + #line 3358 "Python/bytecodes.c" PyObject *cond = POP(); int err = 
PyObject_IsTrue(cond); Py_DECREF(cond); @@ -4633,12 +4645,12 @@ assert(err == 0 || err == 1); int offset = (1-err)*oparg; INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); - #line 4637 "Python/generated_cases.c.h" + #line 4649 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_POP_JUMP_IF_NONE) { - #line 3363 "Python/bytecodes.c" + #line 3369 "Python/bytecodes.c" PyObject *value = POP(); _Py_CODEUNIT *here = next_instr-1; int offset; @@ -4651,12 +4663,12 @@ offset = 0; } INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); - #line 4655 "Python/generated_cases.c.h" + #line 4667 "Python/generated_cases.c.h" DISPATCH(); } TARGET(INSTRUMENTED_POP_JUMP_IF_NOT_NONE) { - #line 3378 "Python/bytecodes.c" + #line 3384 "Python/bytecodes.c" PyObject *value = POP(); _Py_CODEUNIT *here = next_instr-1; int offset; @@ -4669,30 +4681,30 @@ offset = oparg; } INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH); - #line 4673 "Python/generated_cases.c.h" + #line 4685 "Python/generated_cases.c.h" DISPATCH(); } TARGET(EXTENDED_ARG) { - #line 3393 "Python/bytecodes.c" + #line 3399 "Python/bytecodes.c" assert(oparg); opcode = next_instr->op.code; oparg = oparg << 8 | next_instr->op.arg; PRE_DISPATCH_GOTO(); DISPATCH_GOTO(); - #line 4684 "Python/generated_cases.c.h" + #line 4696 "Python/generated_cases.c.h" } TARGET(CACHE) { - #line 3401 "Python/bytecodes.c" + #line 3407 "Python/bytecodes.c" assert(0 && "Executing a cache."); Py_UNREACHABLE(); - #line 4691 "Python/generated_cases.c.h" + #line 4703 "Python/generated_cases.c.h" } TARGET(RESERVED) { - #line 3406 "Python/bytecodes.c" + #line 3412 "Python/bytecodes.c" assert(0 && "Executing RESERVED instruction."); Py_UNREACHABLE(); - #line 4698 "Python/generated_cases.c.h" + #line 4710 "Python/generated_cases.c.h" } diff --git a/Python/import.c b/Python/import.c index 0bf107b28d3990..9e1857d5f3e4e6 100644 --- a/Python/import.c +++ b/Python/import.c @@ -3840,6 +3840,7 @@ imp_module_exec(PyObject *module) static PyModuleDef_Slot imp_slots[] = { {Py_mod_exec, imp_module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Python/instrumentation.c b/Python/instrumentation.c index 89af58e9549398..ca9951fceaa13b 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -1482,25 +1482,25 @@ _Py_Instrument(PyCodeObject *code, PyInterpreterState *interp) } } } - uint8_t new_line_tools = new_events.tools[PY_MONITORING_EVENT_LINE]; + + // GH-103845: We need to remove both the line and instruction instrumentation before + // adding new ones, otherwise we may remove the newly added instrumentation. 
+ uint8_t removed_line_tools = removed_events.tools[PY_MONITORING_EVENT_LINE]; - if (new_line_tools | removed_line_tools) { + uint8_t removed_per_instruction_tools = removed_events.tools[PY_MONITORING_EVENT_INSTRUCTION]; + + if (removed_line_tools) { _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; for (int i = code->_co_firsttraceable; i < code_len;) { if (line_data[i].original_opcode) { if (removed_line_tools) { remove_line_tools(code, i, removed_line_tools); } - if (new_line_tools) { - add_line_tools(code, i, new_line_tools); - } } i += instruction_length(code, i); } } - uint8_t new_per_instruction_tools = new_events.tools[PY_MONITORING_EVENT_INSTRUCTION]; - uint8_t removed_per_instruction_tools = removed_events.tools[PY_MONITORING_EVENT_INSTRUCTION]; - if (new_per_instruction_tools | removed_per_instruction_tools) { + if (removed_per_instruction_tools) { for (int i = code->_co_firsttraceable; i < code_len;) { int opcode = _Py_GetBaseOpcode(code, i); if (opcode == RESUME || opcode == END_FOR) { @@ -1510,6 +1510,31 @@ _Py_Instrument(PyCodeObject *code, PyInterpreterState *interp) if (removed_per_instruction_tools) { remove_per_instruction_tools(code, i, removed_per_instruction_tools); } + i += instruction_length(code, i); + } + } + + uint8_t new_line_tools = new_events.tools[PY_MONITORING_EVENT_LINE]; + uint8_t new_per_instruction_tools = new_events.tools[PY_MONITORING_EVENT_INSTRUCTION]; + + if (new_line_tools) { + _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; + for (int i = code->_co_firsttraceable; i < code_len;) { + if (line_data[i].original_opcode) { + if (new_line_tools) { + add_line_tools(code, i, new_line_tools); + } + } + i += instruction_length(code, i); + } + } + if (new_per_instruction_tools) { + for (int i = code->_co_firsttraceable; i < code_len;) { + int opcode = _Py_GetBaseOpcode(code, i); + if (opcode == RESUME || opcode == END_FOR) { + i += instruction_length(code, i); + continue; + } if (new_per_instruction_tools) { add_per_instruction_tools(code, i, new_per_instruction_tools); } diff --git a/Python/marshal.c b/Python/marshal.c index 2966139cec9ae9..208996b05fc484 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -1870,6 +1870,7 @@ marshal_module_exec(PyObject *mod) static PyModuleDef_Slot marshalmodule_slots[] = { {Py_mod_exec, marshal_module_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Python/opcode_metadata.h b/Python/opcode_metadata.h index 4cee71459a69de..7b0f4dd27de378 100644 --- a/Python/opcode_metadata.h +++ b/Python/opcode_metadata.h @@ -21,6 +21,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 0; case LOAD_FAST: return 0; + case LOAD_FAST_AND_CLEAR: + return 0; case LOAD_CONST: return 0; case STORE_FAST: @@ -411,6 +413,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 1; case LOAD_FAST: return 1; + case LOAD_FAST_AND_CLEAR: + return 1; case LOAD_CONST: return 1; case STORE_FAST: @@ -799,6 +803,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = { [LOAD_CLOSURE] = { true, INSTR_FMT_IB }, [LOAD_FAST_CHECK] = { true, INSTR_FMT_IB }, [LOAD_FAST] = { true, INSTR_FMT_IB }, + [LOAD_FAST_AND_CLEAR] = { true, INSTR_FMT_IB }, [LOAD_CONST] = { true, INSTR_FMT_IB }, [STORE_FAST] = { true, INSTR_FMT_IB }, [LOAD_FAST__LOAD_FAST] = { true, INSTR_FMT_IBIB }, diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index 1bb9004546a3e9..680a61cf17a84f 100644 --- a/Python/opcode_targets.h +++ 
b/Python/opcode_targets.h @@ -142,7 +142,7 @@ static void *opcode_targets[256] = { &&TARGET_JUMP_BACKWARD, &&TARGET_LOAD_SUPER_ATTR, &&TARGET_CALL_FUNCTION_EX, - &&TARGET_STORE_FAST__LOAD_FAST, + &&TARGET_LOAD_FAST_AND_CLEAR, &&TARGET_EXTENDED_ARG, &&TARGET_LIST_APPEND, &&TARGET_SET_ADD, @@ -152,24 +152,24 @@ static void *opcode_targets[256] = { &&TARGET_YIELD_VALUE, &&TARGET_RESUME, &&TARGET_MATCH_CLASS, + &&TARGET_STORE_FAST__LOAD_FAST, &&TARGET_STORE_FAST__STORE_FAST, - &&TARGET_STORE_SUBSCR_DICT, &&TARGET_FORMAT_VALUE, &&TARGET_BUILD_CONST_KEY_MAP, &&TARGET_BUILD_STRING, + &&TARGET_STORE_SUBSCR_DICT, &&TARGET_STORE_SUBSCR_LIST_INT, &&TARGET_UNPACK_SEQUENCE_LIST, &&TARGET_UNPACK_SEQUENCE_TUPLE, - &&TARGET_UNPACK_SEQUENCE_TWO_TUPLE, &&TARGET_LIST_EXTEND, &&TARGET_SET_UPDATE, &&TARGET_DICT_MERGE, &&TARGET_DICT_UPDATE, + &&TARGET_UNPACK_SEQUENCE_TWO_TUPLE, &&TARGET_SEND_GEN, &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, &&TARGET_CALL, &&TARGET_KW_NAMES, &&TARGET_CALL_INTRINSIC_1, diff --git a/Python/perf_trampoline.c b/Python/perf_trampoline.c index 1957ab82c33951..3b183280e1f24c 100644 --- a/Python/perf_trampoline.c +++ b/Python/perf_trampoline.c @@ -253,7 +253,7 @@ perf_map_write_entry(void *state, const void *code_addr, NULL); return; } - fprintf(method_file, "%p %x py::%s:%s\n", code_addr, code_size, entry, + fprintf(method_file, "%" PRIxPTR " %x py::%s:%s\n", (uintptr_t) code_addr, code_size, entry, filename); fflush(method_file); } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index ba248d208e425a..61f87c5eba60ed 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -546,7 +546,8 @@ pycore_init_runtime(_PyRuntimeState *runtime, static PyStatus -init_interp_settings(PyInterpreterState *interp, const _PyInterpreterConfig *config) +init_interp_settings(PyInterpreterState *interp, + const PyInterpreterConfig *config) { assert(interp->feature_flags == 0); @@ -584,12 +585,13 @@ init_interp_settings(PyInterpreterState *interp, const _PyInterpreterConfig *con static PyStatus -init_interp_create_gil(PyThreadState *tstate) +init_interp_create_gil(PyThreadState *tstate, int own_gil) { PyStatus status; /* finalize_interp_delete() comment explains why _PyEval_FiniGIL() is only called here. */ + // XXX This is broken with a per-interpreter GIL. _PyEval_FiniGIL(tstate->interp); /* Auto-thread-state API */ @@ -599,7 +601,7 @@ init_interp_create_gil(PyThreadState *tstate) } /* Create the GIL and take it */ - status = _PyEval_InitGIL(tstate); + status = _PyEval_InitGIL(tstate, own_gil); if (_PyStatus_EXCEPTION(status)) { return status; } @@ -631,7 +633,9 @@ pycore_create_interpreter(_PyRuntimeState *runtime, return status; } - const _PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT; + PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT; + // The main interpreter always has its own GIL. + config.own_gil = 1; status = init_interp_settings(interp, &config); if (_PyStatus_EXCEPTION(status)) { return status; @@ -642,9 +646,10 @@ pycore_create_interpreter(_PyRuntimeState *runtime, return _PyStatus_ERR("can't make first thread"); } _PyThreadState_Bind(tstate); - (void) PyThreadState_Swap(tstate); + // XXX For now we do this before the GIL is created. 
+ (void) _PyThreadState_SwapNoGIL(tstate); - status = init_interp_create_gil(tstate); + status = init_interp_create_gil(tstate, config.own_gil); if (_PyStatus_EXCEPTION(status)) { return status; } @@ -1302,8 +1307,7 @@ _Py_InitializeMain(void) if (_PyStatus_EXCEPTION(status)) { return status; } - _PyRuntimeState *runtime = &_PyRuntime; - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + PyThreadState *tstate = _PyThreadState_GET(); return pyinit_main(tstate); } @@ -1663,8 +1667,10 @@ flush_std_files(void) static void finalize_interp_types(PyInterpreterState *interp) { + _PyIO_FiniTypes(interp); + _PyUnicode_FiniTypes(interp); - _PySys_Fini(interp); + _PySys_FiniTypes(interp); _PyExc_Fini(interp); _PyAsyncGen_Fini(interp); _PyContext_Fini(interp); @@ -1706,8 +1712,6 @@ finalize_interp_clear(PyThreadState *tstate) /* Clear interpreter state and all thread states */ _PyInterpreterState_Clear(tstate); - _PyIO_FiniTypes(tstate->interp); - /* Clear all loghooks */ /* Both _PySys_Audit function and users still need PyObject, such as tuple. Call _PySys_ClearAuditHooks when PyObject available. */ @@ -1754,7 +1758,7 @@ Py_FinalizeEx(void) } /* Get current thread state and interpreter pointer */ - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + PyThreadState *tstate = _PyThreadState_GET(); // XXX assert(_Py_IsMainInterpreter(tstate->interp)); // XXX assert(_Py_IsMainThread()); @@ -1991,7 +1995,7 @@ Py_Finalize(void) */ static PyStatus -new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config) +new_interpreter(PyThreadState **tstate_p, const PyInterpreterConfig *config) { PyStatus status; @@ -2023,11 +2027,20 @@ new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config) } _PyThreadState_Bind(tstate); - PyThreadState *save_tstate = PyThreadState_Swap(tstate); + // XXX For now we do this before the GIL is created. + PyThreadState *save_tstate = _PyThreadState_SwapNoGIL(tstate); + int has_gil = 0; + + /* From this point until the init_interp_create_gil() call, + we must not do anything that requires that the GIL be held + (or otherwise exist). That applies whether or not the new + interpreter has its own GIL (e.g. the main interpreter). */ /* Copy the current interpreter config into the new interpreter */ const PyConfig *src_config; if (save_tstate != NULL) { + // XXX Might new_interpreter() have been called without the GIL held? + _PyEval_ReleaseLock(save_tstate); src_config = _PyInterpreterState_GetConfig(save_tstate->interp); } else @@ -2037,20 +2050,23 @@ new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config) src_config = _PyInterpreterState_GetConfig(main_interp); } + /* This does not require that the GIL be held. */ status = _PyConfig_Copy(&interp->config, src_config); if (_PyStatus_EXCEPTION(status)) { goto error; } + /* This does not require that the GIL be held. */ status = init_interp_settings(interp, config); if (_PyStatus_EXCEPTION(status)) { goto error; } - status = init_interp_create_gil(tstate); + status = init_interp_create_gil(tstate, config->own_gil); if (_PyStatus_EXCEPTION(status)) { goto error; } + has_gil = 1; status = pycore_interp_init(tstate); if (_PyStatus_EXCEPTION(status)) { @@ -2070,7 +2086,12 @@ new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config) /* Oops, it didn't work. Undo it all. 
*/ PyErr_PrintEx(0); - PyThreadState_Swap(save_tstate); + if (has_gil) { + PyThreadState_Swap(save_tstate); + } + else { + _PyThreadState_SwapNoGIL(save_tstate); + } PyThreadState_Clear(tstate); PyThreadState_Delete(tstate); PyInterpreterState_Delete(interp); @@ -2079,8 +2100,8 @@ new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config) } PyStatus -_Py_NewInterpreterFromConfig(PyThreadState **tstate_p, - const _PyInterpreterConfig *config) +Py_NewInterpreterFromConfig(PyThreadState **tstate_p, + const PyInterpreterConfig *config) { return new_interpreter(tstate_p, config); } @@ -2089,8 +2110,8 @@ PyThreadState * Py_NewInterpreter(void) { PyThreadState *tstate = NULL; - const _PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT; - PyStatus status = _Py_NewInterpreterFromConfig(&tstate, &config); + const PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT; + PyStatus status = new_interpreter(&tstate, &config); if (_PyStatus_EXCEPTION(status)) { Py_ExitStatusException(status); } @@ -2799,7 +2820,7 @@ fatal_error(int fd, int header, const char *prefix, const char *msg, tss_tstate != tstate if the current Python thread does not hold the GIL. */ - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + PyThreadState *tstate = _PyThreadState_GET(); PyInterpreterState *interp = NULL; PyThreadState *tss_tstate = PyGILState_GetThisThreadState(); if (tstate != NULL) { diff --git a/Python/pystate.c b/Python/pystate.c index f103a059f0f369..26debf1f88b94a 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -425,8 +425,6 @@ init_runtime(_PyRuntimeState *runtime, runtime->open_code_userdata = open_code_userdata; runtime->audit_hook_head = audit_hook_head; - _PyEval_InitRuntimeState(&runtime->ceval); - PyPreConfig_InitPythonConfig(&runtime->preconfig); PyThread_type_lock *lockptrs[NUMLOCKS] = { @@ -682,7 +680,7 @@ init_interpreter(PyInterpreterState *interp, memcpy(&interp->obmalloc.pools.used, temp, sizeof(temp)); } - _PyEval_InitState(&interp->ceval, pending_lock); + _PyEval_InitState(interp, pending_lock); _PyGC_InitState(&interp->gc); PyConfig_InitPythonConfig(&interp->config); _PyType_InitCache(interp); @@ -1809,7 +1807,7 @@ int PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) { _PyRuntimeState *runtime = &_PyRuntime; - PyInterpreterState *interp = _PyRuntimeState_GetThreadState(runtime)->interp; + PyInterpreterState *interp = _PyInterpreterState_GET(); /* Although the GIL is held, a few C API functions can be called * without the GIL held, and in particular some that create and @@ -1863,17 +1861,11 @@ PyThreadState_Get(void) } -PyThreadState * -_PyThreadState_Swap(_PyRuntimeState *runtime, PyThreadState *newts) +static void +_swap_thread_states(_PyRuntimeState *runtime, + PyThreadState *oldts, PyThreadState *newts) { -#if defined(Py_DEBUG) - /* This can be called from PyEval_RestoreThread(). Similar - to it, we need to ensure errno doesn't change. - */ - int err = errno; -#endif - PyThreadState *oldts = current_fast_get(runtime); - + // XXX Do this only if oldts != NULL? current_fast_clear(runtime); if (oldts != NULL) { @@ -1887,6 +1879,20 @@ _PyThreadState_Swap(_PyRuntimeState *runtime, PyThreadState *newts) current_fast_set(runtime, newts); tstate_activate(newts); } +} + +PyThreadState * +_PyThreadState_SwapNoGIL(PyThreadState *newts) +{ +#if defined(Py_DEBUG) + /* This can be called from PyEval_RestoreThread(). Similar + to it, we need to ensure errno doesn't change. 
+ */ + int err = errno; +#endif + + PyThreadState *oldts = current_fast_get(&_PyRuntime); + _swap_thread_states(&_PyRuntime, oldts, newts); #if defined(Py_DEBUG) errno = err; @@ -1894,6 +1900,20 @@ _PyThreadState_Swap(_PyRuntimeState *runtime, PyThreadState *newts) return oldts; } +PyThreadState * +_PyThreadState_Swap(_PyRuntimeState *runtime, PyThreadState *newts) +{ + PyThreadState *oldts = current_fast_get(runtime); + if (oldts != NULL) { + _PyEval_ReleaseLock(oldts); + } + _swap_thread_states(runtime, oldts, newts); + if (newts != NULL) { + _PyEval_AcquireLock(newts); + } + return oldts; +} + PyThreadState * PyThreadState_Swap(PyThreadState *newts) { @@ -2186,7 +2206,7 @@ PyGILState_Ensure(void) /* Ensure that _PyEval_InitThreads() and _PyGILState_Init() have been called by Py_Initialize() */ - assert(_PyEval_ThreadsInitialized(runtime)); + assert(_PyEval_ThreadsInitialized()); assert(gilstate_tss_initialized(runtime)); assert(runtime->gilstate.autoInterpreterState != NULL); diff --git a/Python/specialize.c b/Python/specialize.c index b1cc66124cfa4a..2ccca3a2802c17 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -1455,7 +1455,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins goto fail; } if (PyObject_CheckBuffer(container)) { - if (PyLong_CheckExact(sub) && (((size_t)Py_SIZE(sub)) > 1)) { + if (PyLong_CheckExact(sub) && (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub))) { SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OUT_OF_RANGE); } else if (strcmp(container_type->tp_name, "array.array") == 0) { diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index 27f42e5202e571..ed4a0ac2dd32de 100644 --- a/Python/stdlib_module_names.h +++ b/Python/stdlib_module_names.h @@ -56,6 +56,7 @@ static const char* _Py_stdlib_module_names[] = { "_posixshmem", "_posixsubprocess", "_py_abc", +"_pydatetime", "_pydecimal", "_pyio", "_pylong", diff --git a/Python/structmember.c b/Python/structmember.c index 1b8be28dcf2eb2..19a75224a0f32e 100644 --- a/Python/structmember.c +++ b/Python/structmember.c @@ -8,6 +8,12 @@ PyObject * PyMember_GetOne(const char *obj_addr, PyMemberDef *l) { PyObject *v; + if (l->flags & Py_RELATIVE_OFFSET) { + PyErr_SetString( + PyExc_SystemError, + "PyMember_GetOne used with Py_RELATIVE_OFFSET"); + return NULL; + } const char* addr = obj_addr + l->offset; switch (l->type) { @@ -103,6 +109,12 @@ int PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) { PyObject *oldv; + if (l->flags & Py_RELATIVE_OFFSET) { + PyErr_SetString( + PyExc_SystemError, + "PyMember_SetOne used with Py_RELATIVE_OFFSET"); + return -1; + } addr += l->offset; diff --git a/Python/symtable.c b/Python/symtable.c index df7473943f3fc1..6e74d764245a57 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -103,6 +103,7 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block, ste->ste_comprehension = NoComprehension; ste->ste_returns_value = 0; ste->ste_needs_class_closure = 0; + ste->ste_comp_inlined = 0; ste->ste_comp_iter_target = 0; ste->ste_comp_iter_expr = 0; @@ -558,6 +559,67 @@ analyze_name(PySTEntryObject *ste, PyObject *scopes, PyObject *name, long flags, return 1; } +static int +is_free_in_any_child(PySTEntryObject *entry, PyObject *key) +{ + for (Py_ssize_t i = 0; i < PyList_GET_SIZE(entry->ste_children); i++) { + PySTEntryObject *child_ste = (PySTEntryObject *)PyList_GET_ITEM( + entry->ste_children, i); + long scope = _PyST_GetScope(child_ste, key); + if (scope == FREE) { + return 1; + } + } + return 0; +} + +static 
int +inline_comprehension(PySTEntryObject *ste, PySTEntryObject *comp, + PyObject *scopes, PyObject *comp_free) +{ + PyObject *k, *v; + Py_ssize_t pos = 0; + while (PyDict_Next(comp->ste_symbols, &pos, &k, &v)) { + // skip comprehension parameter + long comp_flags = PyLong_AS_LONG(v); + if (comp_flags & DEF_PARAM) { + assert(_PyUnicode_EqualToASCIIString(k, ".0")); + continue; + } + int scope = (comp_flags >> SCOPE_OFFSET) & SCOPE_MASK; + int only_flags = comp_flags & ((1 << SCOPE_OFFSET) - 1); + PyObject *existing = PyDict_GetItemWithError(ste->ste_symbols, k); + if (existing == NULL && PyErr_Occurred()) { + return 0; + } + if (!existing) { + // name does not exist in scope, copy from comprehension + assert(scope != FREE || PySet_Contains(comp_free, k) == 1); + PyObject *v_flags = PyLong_FromLong(only_flags); + if (v_flags == NULL) { + return 0; + } + int ok = PyDict_SetItem(ste->ste_symbols, k, v_flags); + Py_DECREF(v_flags); + if (ok < 0) { + return 0; + } + SET_SCOPE(scopes, k, scope); + } + else { + // free vars in comprehension that are locals in outer scope can + // now simply be locals, unless they are free in comp children + if ((PyLong_AsLong(existing) & DEF_BOUND) && + !is_free_in_any_child(comp, k)) { + if (PySet_Discard(comp_free, k) < 0) { + return 0; + } + } + } + } + return 1; +} + #undef SET_SCOPE /* If a name is defined in free and also in locals, then this block @@ -727,17 +789,17 @@ update_symbols(PyObject *symbols, PyObject *scopes, static int analyze_child_block(PySTEntryObject *entry, PyObject *bound, PyObject *free, - PyObject *global, PyObject* child_free); + PyObject *global, PyObject **child_free); static int analyze_block(PySTEntryObject *ste, PyObject *bound, PyObject *free, PyObject *global) { PyObject *name, *v, *local = NULL, *scopes = NULL, *newbound = NULL; - PyObject *newglobal = NULL, *newfree = NULL, *allfree = NULL; + PyObject *newglobal = NULL, *newfree = NULL; PyObject *temp; - int i, success = 0; - Py_ssize_t pos = 0; + int success = 0; + Py_ssize_t i, pos = 0; local = PySet_New(NULL); /* collect new names bound in block */ if (!local) @@ -746,8 +808,8 @@ analyze_block(PySTEntryObject *ste, PyObject *bound, PyObject *free, if (!scopes) goto error; - /* Allocate new global and bound variable dictionaries. These - dictionaries hold the names visible in nested blocks. For + /* Allocate new global, bound and free variable sets. These + sets hold the names visible in nested blocks. For ClassBlocks, the bound and global names are initialized before analyzing names, because class bindings aren't visible in methods. For other blocks, they are initialized @@ -826,28 +888,55 @@ analyze_block(PySTEntryObject *ste, PyObject *bound, PyObject *free, newbound, newglobal now contain the names visible in nested blocks. The free variables in the children will - be collected in allfree. + be added to newfree. 
*/ - allfree = PySet_New(NULL); - if (!allfree) - goto error; for (i = 0; i < PyList_GET_SIZE(ste->ste_children); ++i) { + PyObject *child_free = NULL; PyObject *c = PyList_GET_ITEM(ste->ste_children, i); PySTEntryObject* entry; assert(c && PySTEntry_Check(c)); entry = (PySTEntryObject*)c; + + // we inline all non-generator-expression comprehensions + int inline_comp = + entry->ste_comprehension && + !entry->ste_generator; + if (!analyze_child_block(entry, newbound, newfree, newglobal, - allfree)) + &child_free)) + { goto error; + } + if (inline_comp) { + if (!inline_comprehension(ste, entry, scopes, child_free)) { + Py_DECREF(child_free); + goto error; + } + entry->ste_comp_inlined = 1; + } + temp = PyNumber_InPlaceOr(newfree, child_free); + Py_DECREF(child_free); + if (!temp) + goto error; + Py_DECREF(temp); /* Check if any children have free variables */ if (entry->ste_free || entry->ste_child_free) ste->ste_child_free = 1; } - temp = PyNumber_InPlaceOr(newfree, allfree); - if (!temp) - goto error; - Py_DECREF(temp); + /* Splice children of inlined comprehensions into our children list */ + for (i = PyList_GET_SIZE(ste->ste_children) - 1; i >= 0; --i) { + PyObject* c = PyList_GET_ITEM(ste->ste_children, i); + PySTEntryObject* entry; + assert(c && PySTEntry_Check(c)); + entry = (PySTEntryObject*)c; + if (entry->ste_comp_inlined && + PyList_SetSlice(ste->ste_children, i, i + 1, + entry->ste_children) < 0) + { + goto error; + } + } /* Check if any local variables must be converted to cell variables */ if (ste->ste_type == FunctionBlock && !analyze_cells(scopes, newfree)) @@ -870,7 +959,6 @@ analyze_block(PySTEntryObject *ste, PyObject *bound, PyObject *free, Py_XDECREF(newbound); Py_XDECREF(newglobal); Py_XDECREF(newfree); - Py_XDECREF(allfree); if (!success) assert(PyErr_Occurred()); return success; @@ -878,16 +966,15 @@ analyze_block(PySTEntryObject *ste, PyObject *bound, PyObject *free, static int analyze_child_block(PySTEntryObject *entry, PyObject *bound, PyObject *free, - PyObject *global, PyObject* child_free) + PyObject *global, PyObject** child_free) { PyObject *temp_bound = NULL, *temp_global = NULL, *temp_free = NULL; - PyObject *temp; - /* Copy the bound and global dictionaries. + /* Copy the bound/global/free sets. - These dictionaries are used by all blocks enclosed by the + These sets are used by all blocks enclosed by the current block. The analyze_block() call modifies these - dictionaries. + sets. 
*/ temp_bound = PySet_New(bound); @@ -902,12 +989,8 @@ analyze_child_block(PySTEntryObject *entry, PyObject *bound, PyObject *free, if (!analyze_block(entry, temp_bound, temp_free, temp_global)) goto error; - temp = PyNumber_InPlaceOr(child_free, temp_free); - if (!temp) - goto error; - Py_DECREF(temp); + *child_free = temp_free; Py_DECREF(temp_bound); - Py_DECREF(temp_free); Py_DECREF(temp_global); return 1; error: @@ -2216,4 +2299,3 @@ _Py_Mangle(PyObject *privateobj, PyObject *ident) assert(_PyUnicode_CheckConsistency(result, 1)); return result; } - diff --git a/Python/sysmodule.c b/Python/sysmodule.c index d673e40af5e1de..894a3e8a98fd8a 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -365,7 +365,7 @@ PySys_AddAuditHook(Py_AuditHookFunction hook, void *userData) _PyRuntimeState *runtime = &_PyRuntime; PyThreadState *tstate; if (runtime->initialized) { - tstate = _PyRuntimeState_GetThreadState(runtime); + tstate = _PyThreadState_GET(); } else { tstate = NULL; @@ -3141,6 +3141,7 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) { PyObject *version_info; int res; + PyInterpreterState *interp = tstate->interp; /* stdin/stdout/stderr are set in pylifecycle.c */ @@ -3166,7 +3167,9 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) SET_SYS("float_info", PyFloat_GetInfo()); SET_SYS("int_info", PyLong_GetInfo()); /* initialize hash_info */ - if (_PyStructSequence_InitBuiltin(&Hash_InfoType, &hash_info_desc) < 0) { + if (_PyStructSequence_InitBuiltin(interp, &Hash_InfoType, + &hash_info_desc) < 0) + { goto type_init_failed; } SET_SYS("hash_info", get_hash_info(tstate)); @@ -3190,7 +3193,7 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) #define ENSURE_INFO_TYPE(TYPE, DESC) \ do { \ if (_PyStructSequence_InitBuiltinWithFlags( \ - &TYPE, &DESC, Py_TPFLAGS_DISALLOW_INSTANTIATION) < 0) { \ + interp, &TYPE, &DESC, Py_TPFLAGS_DISALLOW_INSTANTIATION) < 0) { \ goto type_init_failed; \ } \ } while (0) @@ -3226,8 +3229,9 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict) SET_SYS("thread_info", PyThread_GetInfo()); /* initialize asyncgen_hooks */ - if (_PyStructSequence_InitBuiltin( - &AsyncGenHooksType, &asyncgen_hooks_desc) < 0) { + if (_PyStructSequence_InitBuiltin(interp, &AsyncGenHooksType, + &asyncgen_hooks_desc) < 0) + { goto type_init_failed; } @@ -3489,20 +3493,20 @@ _PySys_Create(PyThreadState *tstate, PyObject **sysmod_p) void -_PySys_Fini(PyInterpreterState *interp) +_PySys_FiniTypes(PyInterpreterState *interp) { - if (_Py_IsMainInterpreter(interp)) { - _PyStructSequence_FiniType(&VersionInfoType); - _PyStructSequence_FiniType(&FlagsType); + _PyStructSequence_FiniBuiltin(interp, &VersionInfoType); + _PyStructSequence_FiniBuiltin(interp, &FlagsType); #if defined(MS_WINDOWS) - _PyStructSequence_FiniType(&WindowsVersionType); + _PyStructSequence_FiniBuiltin(interp, &WindowsVersionType); #endif - _PyStructSequence_FiniType(&Hash_InfoType); - _PyStructSequence_FiniType(&AsyncGenHooksType); + _PyStructSequence_FiniBuiltin(interp, &Hash_InfoType); + _PyStructSequence_FiniBuiltin(interp, &AsyncGenHooksType); #ifdef __EMSCRIPTEN__ + if (_Py_IsMainInterpreter(interp)) { Py_CLEAR(EmscriptenInfoType); -#endif } +#endif } diff --git a/Python/thread.c b/Python/thread.c index 7fdedb0b9b7e26..7fc53f9b61360b 100644 --- a/Python/thread.c +++ b/Python/thread.c @@ -7,7 +7,7 @@ #include "Python.h" #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "pycore_structseq.h" // _PyStructSequence_FiniType() +#include "pycore_structseq.h" // 
_PyStructSequence_FiniBuiltin() #include "pycore_pythread.h" #ifndef DONT_HAVE_STDIO_H @@ -137,7 +137,8 @@ PyThread_GetInfo(void) int len; #endif - if (_PyStructSequence_InitBuiltin(&ThreadInfoType, &threadinfo_desc) < 0) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (_PyStructSequence_InitBuiltin(interp, &ThreadInfoType, &threadinfo_desc) < 0) { return NULL; } @@ -191,9 +192,5 @@ PyThread_GetInfo(void) void _PyThread_FiniType(PyInterpreterState *interp) { - if (!_Py_IsMainInterpreter(interp)) { - return; - } - - _PyStructSequence_FiniType(&ThreadInfoType); + _PyStructSequence_FiniBuiltin(interp, &ThreadInfoType); } diff --git a/Python/traceback.c b/Python/traceback.c index 097f69c76abfe1..b2479542047308 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -1180,7 +1180,7 @@ dump_frame(int fd, _PyInterpreterFrame *frame) PUTS(fd, "???"); } - int lineno = _PyInterpreterFrame_GetLine(frame); + int lineno = PyUnstable_InterpreterFrame_GetLine(frame); PUTS(fd, ", line "); if (lineno >= 0) { _Py_DumpDecimal(fd, (size_t)lineno); diff --git a/Tools/build/generate_global_objects.py b/Tools/build/generate_global_objects.py index c27817702bf97d..ded19ee489e79b 100644 --- a/Tools/build/generate_global_objects.py +++ b/Tools/build/generate_global_objects.py @@ -121,6 +121,8 @@ '__xor__', '__divmod__', '__rdivmod__', + '__buffer__', + '__release_buffer__', ] NON_GENERATED_IMMORTAL_OBJECTS = [ diff --git a/Tools/build/generate_opcode_h.py b/Tools/build/generate_opcode_h.py index 645b9f1de1170b..5be981005725bf 100644 --- a/Tools/build/generate_opcode_h.py +++ b/Tools/build/generate_opcode_h.py @@ -52,6 +52,18 @@ #endif // !Py_INTERNAL_OPCODE_H """ +intrinsic_header = f""" +// Auto-generated by {SCRIPT_NAME} from {PYTHON_OPCODE} + +""".lstrip() + +intrinsic_footer = """ +typedef PyObject *(*instrinsic_func1)(PyThreadState* tstate, PyObject *value); +typedef PyObject *(*instrinsic_func2)(PyThreadState* tstate, PyObject *value1, PyObject *value2); +extern const instrinsic_func1 _PyIntrinsics_UnaryFunctions[]; +extern const instrinsic_func2 _PyIntrinsics_BinaryFunctions[]; +""" + DEFINE = "#define {:<38} {:>3}\n" UINT32_MASK = (1<<32)-1 @@ -67,7 +79,9 @@ def write_int_array_from_ops(name, ops, out): assert bits == 0 out.write(f"}};\n") -def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/internal/pycore_opcode.h'): +def main(opcode_py, outfile='Include/opcode.h', + internaloutfile='Include/internal/pycore_opcode.h', + intrinsicoutfile='Include/internal/pycore_intrinsics.h'): opcode = {} if hasattr(tokenize, 'open'): fp = tokenize.open(opcode_py) # Python 3.2+ @@ -107,9 +121,11 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna opname_including_specialized[next_op] = name used[next_op] = True - with open(outfile, 'w') as fobj, open(internaloutfile, 'w') as iobj: + with open(outfile, 'w') as fobj, open(internaloutfile, 'w') as iobj, open( + intrinsicoutfile, "w") as nobj: fobj.write(header) iobj.write(internal_header) + nobj.write(intrinsic_header) for name in opname: if name in opmap: @@ -172,6 +188,22 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna for i, (op, _) in enumerate(opcode["_nb_ops"]): fobj.write(DEFINE.format(op, i)) + nobj.write("/* Unary Functions: */") + nobj.write("\n") + for i, op in enumerate(opcode["_intrinsic_1_descs"]): + nobj.write(DEFINE.format(op, i)) + nobj.write("\n") + nobj.write(DEFINE.format("MAX_INTRINSIC_1", i)) + + nobj.write("\n\n") + nobj.write("/* Binary 
Functions: */\n") + for i, op in enumerate(opcode["_intrinsic_2_descs"]): + nobj.write(DEFINE.format(op, i)) + nobj.write("\n") + nobj.write(DEFINE.format("MAX_INTRINSIC_2", i)) + + nobj.write(intrinsic_footer) + fobj.write("\n") fobj.write("/* Defined in Lib/opcode.py */\n") fobj.write(f"#define ENABLE_SPECIALIZATION {int(ENABLE_SPECIALIZATION)}") @@ -201,4 +233,4 @@ def main(opcode_py, outfile='Include/opcode.h', internaloutfile='Include/interna if __name__ == '__main__': - main(sys.argv[1], sys.argv[2], sys.argv[3]) + main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]) diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py index 5924ab7860d8d5..9bd54db0f59c51 100644 --- a/Tools/c-analyzer/cpython/_parser.py +++ b/Tools/c-analyzer/cpython/_parser.py @@ -114,6 +114,7 @@ def clean_lines(text): Modules/md5module.c Modules/_hacl/include Modules/sha1module.c Modules/_hacl/include Modules/sha2module.c Modules/_hacl/include +Modules/sha3module.c Modules/_hacl/include Objects/stringlib/*.h Objects # possible system-installed headers, just in case @@ -271,13 +272,6 @@ def clean_lines(text): Modules/expat/xmlparse.c XML_POOR_ENTROPY 1 Modules/_dbmmodule.c HAVE_GDBM_DASH_NDBM_H 1 -# from Modules/_sha3/sha3module.c -Modules/_sha3/kcp/KeccakP-1600-inplace32BI.c PLATFORM_BYTE_ORDER 4321 # force big-endian -Modules/_sha3/kcp/*.c KeccakOpt 64 -Modules/_sha3/kcp/*.c KeccakP200_excluded 1 -Modules/_sha3/kcp/*.c KeccakP400_excluded 1 -Modules/_sha3/kcp/*.c KeccakP800_excluded 1 - # others Modules/_sre/sre_lib.h LOCAL(type) static inline type Modules/_sre/sre_lib.h SRE(F) sre_ucs2_##F diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index 4dfbbe72df56a0..453f63ec3f1cfb 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -86,6 +86,7 @@ Objects/sliceobject.c - PyEllipsis_Type - Objects/sliceobject.c - PySlice_Type - Objects/tupleobject.c - PyTupleIter_Type - Objects/tupleobject.c - PyTuple_Type - +Objects/typeobject.c - _PyBufferWrapper_Type - Objects/typeobject.c - PyBaseObject_Type - Objects/typeobject.c - PySuper_Type - Objects/typeobject.c - PyType_Type - @@ -317,12 +318,9 @@ Python/instrumentation.c - _PyInstrumentation_MISSING - ## static types Modules/_io/bufferedio.c - PyBufferedIOBase_Type - -Modules/_io/bytesio.c - _PyBytesIOBuffer_Type - Modules/_io/iobase.c - PyIOBase_Type - Modules/_io/iobase.c - PyRawIOBase_Type - -Modules/_io/textio.c - PyIncrementalNewlineDecoder_Type - Modules/_io/textio.c - PyTextIOBase_Type - -Modules/_io/winconsoleio.c - PyWindowsConsoleIO_Type - Modules/_testcapi/vectorcall.c - MethodDescriptorBase_Type - Modules/_testcapi/vectorcall.c - MethodDescriptorDerived_Type - Modules/_testcapi/vectorcall.c - MethodDescriptorNopGet_Type - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index 7a5d7d45f5184b..7ba116dcb171cf 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -216,6 +216,7 @@ Modules/_io/_iomodule.c - static_types - Modules/_io/textio.c - encodefuncs - Modules/_io/winconsoleio.c - _PyWindowsConsoleIO_Type - Modules/_localemodule.c - langinfo_constants - +Modules/_lsprof.c - callback_table - Modules/_pickle.c - READ_WHOLE_LINE - Modules/_sqlite/module.c - error_codes - Modules/_sre/sre.c pattern_repr flag_names - @@ -404,6 +405,7 @@ Modules/_testbuffer.c ndarray_memoryview_from_buffer strides - Modules/_testbuffer.c 
ndarray_memoryview_from_buffer suboffsets - Modules/_testbuffer.c ndarray_push kwlist - Modules/_testbuffer.c staticarray_init kwlist - +Modules/_testcapi/buffer.c - testBufType - Modules/_testcapi/code.c get_code_extra_index key - Modules/_testcapi/datetime.c - test_run_counter - Modules/_testcapi/exceptions.c - PyRecursingInfinitelyError_Type - diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index b8b2b75c749152..a6f330d1502dad 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -43,7 +43,18 @@ NO_VARARG = "PY_SSIZE_T_MAX" CLINIC_PREFIX = "__clinic_" -CLINIC_PREFIXED_ARGS = {"args"} +CLINIC_PREFIXED_ARGS = { + "_keywords", + "_parser", + "args", + "argsbuf", + "fastargs", + "kwargs", + "kwnames", + "nargs", + "noptargs", + "return_value", +} class Unspecified: def __repr__(self): @@ -345,6 +356,7 @@ def __init__(self): # you should check the _return_value for errors, and # "goto exit" if there are any. self.return_conversion = [] + self.converter_retval = "_return_value" # The C statements required to do some operations # after the end of parsing but before cleaning up. @@ -803,13 +815,11 @@ def output_templates(self, f): # parser_body_fields remembers the fields passed in to the # previous call to parser_body. this is used for an awful hack. parser_body_fields = () - parser_body_declarations = '' def parser_body(prototype, *fields, declarations=''): - nonlocal parser_body_fields, parser_body_declarations + nonlocal parser_body_fields add, output = text_accumulator() add(prototype) parser_body_fields = fields - parser_body_declarations = declarations fields = list(fields) fields.insert(0, normalize_snippet(""" @@ -1943,12 +1953,12 @@ def dump(self): return_converters = {} -def write_file(filename, new_contents): +def write_file(filename, new_contents, force=False): try: with open(filename, 'r', encoding="utf-8") as fp: old_contents = fp.read() - if old_contents == new_contents: + if old_contents == new_contents and not force: # no change: avoid modifying the file modification time return except FileNotFoundError: @@ -2112,7 +2122,7 @@ def parse(self, input): traceback.format_exc().rstrip()) printer.print_block(block) - second_pass_replacements = {} + clinic_out = [] # these are destinations not buffers for name, destination in self.destinations.items(): @@ -2153,25 +2163,11 @@ def parse(self, input): block.input = 'preserve\n' printer_2 = BlockPrinter(self.language) printer_2.print_block(block, core_includes=True) - write_file(destination.filename, printer_2.f.getvalue()) + pair = destination.filename, printer_2.f.getvalue() + clinic_out.append(pair) continue - text = printer.f.getvalue() - - if second_pass_replacements: - printer_2 = BlockPrinter(self.language) - parser_2 = BlockParser(text, self.language) - changed = False - for block in parser_2: - if block.dsl_name: - for id, replacement in second_pass_replacements.items(): - if id in block.output: - changed = True - block.output = block.output.replace(id, replacement) - printer_2.print_block(block) - if changed: - text = printer_2.f.getvalue() - return text + return printer.f.getvalue(), clinic_out def _module_and_class(self, fields): @@ -2227,9 +2223,13 @@ def parse_file(filename, *, verify=True, output=None): return clinic = Clinic(language, verify=verify, filename=filename) - cooked = clinic.parse(raw) + src_out, clinic_out = clinic.parse(raw) - write_file(output, cooked) + # If clinic output changed, force updating the source file as well. 
+ force = bool(clinic_out) + write_file(output, src_out, force=force) + for fn, data in clinic_out: + write_file(fn, data) def compute_checksum(input, length=None): @@ -3894,15 +3894,15 @@ def __init__(self, *, py_default=None, **kwargs): def return_converter_init(self): pass - def declare(self, data, name="_return_value"): + def declare(self, data): line = [] add = line.append add(self.type) if not self.type.endswith('*'): add(' ') - add(name + ';') + add(data.converter_retval + ';') data.declarations.append(''.join(line)) - data.return_value = name + data.return_value = data.converter_retval def err_occurred_if(self, expr, data): data.return_conversion.append('if (({}) && PyErr_Occurred()) {{\n goto exit;\n}}\n'.format(expr)) @@ -3924,8 +3924,10 @@ class bool_return_converter(CReturnConverter): def render(self, function, data): self.declare(data) - self.err_occurred_if("_return_value == -1", data) - data.return_conversion.append('return_value = PyBool_FromLong((long)_return_value);\n') + self.err_occurred_if(f"{data.converter_retval} == -1", data) + data.return_conversion.append( + f'return_value = PyBool_FromLong((long){data.converter_retval});\n' + ) class long_return_converter(CReturnConverter): type = 'long' @@ -3935,9 +3937,10 @@ class long_return_converter(CReturnConverter): def render(self, function, data): self.declare(data) - self.err_occurred_if("_return_value == {}-1".format(self.unsigned_cast), data) + self.err_occurred_if(f"{data.converter_retval} == {self.unsigned_cast}-1", data) data.return_conversion.append( - ''.join(('return_value = ', self.conversion_fn, '(', self.cast, '_return_value);\n'))) + f'return_value = {self.conversion_fn}({self.cast}{data.converter_retval});\n' + ) class int_return_converter(long_return_converter): type = 'int' @@ -3979,9 +3982,10 @@ class double_return_converter(CReturnConverter): def render(self, function, data): self.declare(data) - self.err_occurred_if("_return_value == -1.0", data) + self.err_occurred_if(f"{data.converter_retval} == -1.0", data) data.return_conversion.append( - 'return_value = PyFloat_FromDouble(' + self.cast + '_return_value);\n') + f'return_value = PyFloat_FromDouble({self.cast}{data.converter_retval});\n' + ) class float_return_converter(double_return_converter): type = 'float' diff --git a/Tools/msi/bundle/bootstrap/pch.h b/Tools/msi/bundle/bootstrap/pch.h index b0aa5111dabd0d..6d0974b34c61e7 100644 --- a/Tools/msi/bundle/bootstrap/pch.h +++ b/Tools/msi/bundle/bootstrap/pch.h @@ -5,7 +5,7 @@ // The license and further copyright text can be found in the file // LICENSE.TXT at the root directory of the distribution. // -// +// // // Precompiled header for standard bootstrapper application. // diff --git a/Tools/msi/bundle/bootstrap/resource.h b/Tools/msi/bundle/bootstrap/resource.h index 53c03c319f091f..d951e651f6d20d 100644 --- a/Tools/msi/bundle/bootstrap/resource.h +++ b/Tools/msi/bundle/bootstrap/resource.h @@ -14,7 +14,7 @@ // Next default values for new objects -// +// #ifdef APSTUDIO_INVOKED #ifndef APSTUDIO_READONLY_SYMBOLS #define _APS_NEXT_RESOURCE_VALUE 102 diff --git a/configure b/configure index 8133d47f61355b..c9ea72cf6efacf 100755 --- a/configure +++ b/configure @@ -6143,6 +6143,20 @@ cat > conftest.c <=6) && defined(_MIPSEL) @@ -10729,6 +10743,41 @@ cat >>confdefs.h <<_ACEOF _ACEOF +# The cast to long int works around a bug in the HP C Compiler, +# see AC_CHECK_SIZEOF for more information. 
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking alignment of max_align_t" >&5 +$as_echo_n "checking alignment of max_align_t... " >&6; } +if ${ac_cv_alignof_max_align_t+:} false; then : + $as_echo_n "(cached) " >&6 +else + if ac_fn_c_compute_int "$LINENO" "(long int) offsetof (ac__type_alignof_, y)" "ac_cv_alignof_max_align_t" "$ac_includes_default +#ifndef offsetof +# define offsetof(type, member) ((char *) &((type *) 0)->member - (char *) 0) +#endif +typedef struct { char x; max_align_t y; } ac__type_alignof_;"; then : + +else + if test "$ac_cv_type_max_align_t" = yes; then + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error 77 "cannot compute alignment of max_align_t +See \`config.log' for more details" "$LINENO" 5; } + else + ac_cv_alignof_max_align_t=0 + fi +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_alignof_max_align_t" >&5 +$as_echo "$ac_cv_alignof_max_align_t" >&6; } + + + +cat >>confdefs.h <<_ACEOF +#define ALIGNOF_MAX_ALIGN_T $ac_cv_alignof_max_align_t +_ACEOF + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for long double" >&5 diff --git a/configure.ac b/configure.ac index 3f20d8980d8abc..10672bd3761da8 100644 --- a/configure.ac +++ b/configure.ac @@ -959,6 +959,20 @@ cat > conftest.c <=6) && defined(_MIPSEL) @@ -2921,6 +2935,7 @@ AC_CHECK_SIZEOF(size_t, 4) AC_CHECK_ALIGNOF(size_t) AC_CHECK_SIZEOF(pid_t, 4) AC_CHECK_SIZEOF(uintptr_t) +AC_CHECK_ALIGNOF(max_align_t) AC_TYPE_LONG_DOUBLE AC_CHECK_SIZEOF(long double, 16) diff --git a/pyconfig.h.in b/pyconfig.h.in index 236cee6588d49b..2c22b27af65ea3 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -19,6 +19,9 @@ /* The normal alignment of `long', in bytes. */ #undef ALIGNOF_LONG +/* The normal alignment of `max_align_t', in bytes. */ +#undef ALIGNOF_MAX_ALIGN_T + /* The normal alignment of `size_t', in bytes. */ #undef ALIGNOF_SIZE_T