diff --git a/Doc/_static/og-image.png b/Doc/_static/og-image.png
new file mode 100644
index 00000000000000..0e80751e740387
Binary files /dev/null and b/Doc/_static/og-image.png differ
diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst
index 9054e7ee3181a5..a6eb86f1a0b514 100644
--- a/Doc/c-api/code.rst
+++ b/Doc/c-api/code.rst
@@ -115,3 +115,51 @@ bound into a function.
the free variables. On error, ``NULL`` is returned and an exception is raised.
.. versionadded:: 3.11
+
+.. c:function:: int PyCode_AddWatcher(PyCode_WatchCallback callback)
+
+ Register *callback* as a code object watcher for the current interpreter.
+ Return an ID which may be passed to :c:func:`PyCode_ClearWatcher`.
+ In case of error (e.g. no more watcher IDs available),
+ return ``-1`` and set an exception.
+
+ .. versionadded:: 3.12
+
+.. c:function:: int PyCode_ClearWatcher(int watcher_id)
+
+ Clear the watcher identified by *watcher_id* previously returned from
+ :c:func:`PyCode_AddWatcher` for the current interpreter.
+ Return ``0`` on success, or ``-1`` and set an exception on error
+ (e.g. if the given *watcher_id* was never registered).
+
+ .. versionadded:: 3.12
+
+.. c:type:: PyCodeEvent
+
+ Enumeration of possible code object watcher events:
+
+ - ``PY_CODE_EVENT_CREATE``
+ - ``PY_CODE_EVENT_DESTROY``
+
+ .. versionadded:: 3.12
+
+.. c:type:: int (*PyCode_WatchCallback)(PyCodeEvent event, PyCodeObject* co)
+
+ Type of a code object watcher callback function.
+
+ If *event* is ``PY_CODE_EVENT_CREATE``, then the callback is invoked
+ after *co* has been fully initialized. Otherwise, the callback is invoked
+ before the destruction of *co* takes place, so the prior state of *co*
+ can be inspected.
+
+ Users of this API should not rely on internal runtime implementation
+ details. Such details may include, but are not limited to, the exact
+ order and timing of creation and destruction of code objects. While
+ changes in these details may result in differences observable by watchers
+ (including whether a callback is invoked or not), they do not change
+ the semantics of the Python code being executed.
+
+ If the callback returns with an exception set, it must return ``-1``; this
+ exception will be printed as an unraisable exception using
+ :c:func:`PyErr_WriteUnraisable`. Otherwise it should return ``0``.
+
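+ For example, a minimal sketch of a watcher that keeps a running count of
+ live code objects might look like this (the function and variable names
+ below are illustrative only)::
+
+    static Py_ssize_t num_code_objects = 0;
+    static int code_watcher_id = -1;
+
+    static int
+    code_event_counter(PyCodeEvent event, PyCodeObject *co)
+    {
+        /* Invoked after *co* is fully initialized, or just before it is
+           destroyed. */
+        if (event == PY_CODE_EVENT_CREATE) {
+            num_code_objects++;
+        }
+        else {  /* PY_CODE_EVENT_DESTROY */
+            num_code_objects--;
+        }
+        return 0;   /* no exception set */
+    }
+
+    static int
+    install_code_watcher(void)
+    {
+        code_watcher_id = PyCode_AddWatcher(code_event_counter);
+        if (code_watcher_id < 0) {
+            return -1;  /* exception already set by PyCode_AddWatcher() */
+        }
+        return 0;
+    }
+
+ The watcher can later be removed by passing ``code_watcher_id`` to
+ :c:func:`PyCode_ClearWatcher`.
+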
+ .. versionadded:: 3.12
diff --git a/Doc/c-api/refcounting.rst b/Doc/c-api/refcounting.rst
index cd1f2ef7076836..d8e9c2da6f3ff3 100644
--- a/Doc/c-api/refcounting.rst
+++ b/Doc/c-api/refcounting.rst
@@ -7,8 +7,8 @@
Reference Counting
******************
-The macros in this section are used for managing reference counts of Python
-objects.
+The functions and macros in this section are used for managing reference counts
+of Python objects.
.. c:function:: Py_ssize_t Py_REFCNT(PyObject *o)
@@ -129,6 +129,11 @@ objects.
It is a good idea to use this macro whenever decrementing the reference
count of an object that might be traversed during garbage collection.
+ .. versionchanged:: 3.12
+ The macro argument is now only evaluated once. If the argument has side
+ effects, these are no longer duplicated.
+
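+ For example, with an argument that has a side effect, the side effect now
+ happens exactly once (``array`` and ``i`` here are hypothetical)::
+
+    /* The index i is incremented exactly once; before Python 3.12 the
+       argument expression could be evaluated more than once. */
+    Py_CLEAR(array[i++]);
+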
+
.. c:function:: void Py_IncRef(PyObject *o)
Increment the reference count for object *o*. A function version of :c:func:`Py_XINCREF`.
@@ -139,3 +144,40 @@ objects.
Decrement the reference count for object *o*. A function version of :c:func:`Py_XDECREF`.
It can be used for runtime dynamic embedding of Python.
+
+
+.. c:macro:: Py_SETREF(dst, src)
+
+ Macro safely decrementing the *dst* reference count and setting *dst* to
+ *src*.
+
+ As in the case of :c:func:`Py_CLEAR`, "the obvious" code can be deadly::
+
+ Py_DECREF(dst);
+ dst = src;
+
+ The safe way is::
+
+ Py_SETREF(dst, src);
+
+ That arranges to set *dst* to *src* *before* decrementing the reference count
+ of the old value of *dst*, so that any code triggered as a side effect of
+ *dst* getting torn down no longer believes *dst* points to a valid object.
+
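+ For example, a minimal sketch of replacing an attribute that is known to
+ hold a strong reference (``self->attr`` and ``value`` here are hypothetical)::
+
+    /* Store a new strong reference to value in self->attr and release
+       the old reference afterwards. */
+    Py_SETREF(self->attr, Py_NewRef(value));
+
+ Use :c:macro:`Py_XSETREF` below when the old value may be ``NULL``.
+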
+ .. versionadded:: 3.6
+
+ .. versionchanged:: 3.12
+ The macro arguments are now only evaluated once. If an argument has side
+ effects, these are no longer duplicated.
+
+
+.. c:macro:: Py_XSETREF(dst, src)
+
+ Variant of the :c:macro:`Py_SETREF` macro that uses :c:func:`Py_XDECREF` instead
+ of :c:func:`Py_DECREF`.
+
+ .. versionadded:: 3.6
+
+ .. versionchanged:: 3.12
+ The macro arguments are now only evaluated once. If an argument has side
+ effects, these are no longer duplicated.
diff --git a/Doc/conf.py b/Doc/conf.py
index 6fad5c668dab31..b3da8fa9ec4497 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -13,9 +13,25 @@
# General configuration
# ---------------------
-extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest',
- 'pyspecific', 'c_annotations', 'escape4chm',
- 'asdl_highlight', 'peg_highlight', 'glossary_search']
+extensions = [
+ 'asdl_highlight',
+ 'c_annotations',
+ 'escape4chm',
+ 'glossary_search',
+ 'peg_highlight',
+ 'pyspecific',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.doctest',
+]
+
+# Skip if downstream redistributors haven't installed it
+try:
+ import sphinxext.opengraph
+except ImportError:
+ pass
+else:
+ extensions.append('sphinxext.opengraph')
+
doctest_global_setup = '''
try:
@@ -89,6 +105,14 @@
# Short title used e.g. for <title> HTML tags.
html_short_title = '%s Documentation' % release
+# Deployment preview information, from Netlify
+# (See netlify.toml and https://docs.netlify.com/configure-builds/environment-variables/#git-metadata)
+html_context = {
+ "is_deployment_preview": os.getenv("IS_DEPLOYMENT_PREVIEW"),
+ "repository_url": os.getenv("REPOSITORY_URL"),
+ "pr_id": os.getenv("REVIEW_ID")
+}
+
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
@@ -114,7 +138,7 @@
html_use_opensearch = 'https://docs.python.org/' + version
# Additional static files.
-html_static_path = ['tools/static']
+html_static_path = ['_static', 'tools/static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'python' + release.replace('.', '')
@@ -238,3 +262,13 @@
# Relative filename of the data files
refcount_file = 'data/refcounts.dat'
stable_abi_file = 'data/stable_abi.dat'
+
+# sphinxext-opengraph config
+ogp_site_url = 'https://docs.python.org/3/'
+ogp_site_name = 'Python documentation'
+ogp_image = '_static/og-image.png'
+ogp_custom_meta_tags = [
+ '',
+ '',
+ '',
+]
diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst
index 98d9f4febe2dfa..4525acb04503b3 100644
--- a/Doc/howto/enum.rst
+++ b/Doc/howto/enum.rst
@@ -158,6 +158,7 @@ And a function to display the chores for a given day::
... for chore, days in chores.items():
... if day in days:
... print(chore)
+ ...
>>> show_chores(chores_for_ethan, Weekday.SATURDAY)
answer SO questions
@@ -459,6 +460,31 @@ sense to allow sharing some common behavior between a group of enumerations.
(See `OrderedEnum`_ for an example.)
+.. _enum-dataclass-support:
+
+Dataclass support
+-----------------
+
+When inheriting from a :class:`~dataclasses.dataclass`,
+the :meth:`~Enum.__repr__` omits the inherited class' name. For example::
+
+ >>> @dataclass
+ ... class CreatureDataMixin:
+ ... size: str
+ ... legs: int
+ ... tail: bool = field(repr=False, default=True)
+ ...
+ >>> class Creature(CreatureDataMixin, Enum):
+ ... BEETLE = 'small', 6
+ ... DOG = 'medium', 4
+ ...
+ >>> Creature.DOG
+ <Creature.DOG: size='medium', legs=4>
+
+Use the :func:`!dataclass` argument ``repr=False``
+to use the standard :func:`repr`.
+
+
Pickling
--------
@@ -687,6 +713,7 @@ It is also possible to name the combinations::
... W = 2
... X = 1
... RWX = 7
+ ...
>>> Perm.RWX
>>> ~Perm.RWX
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index f8839d0986d047..e6c96486492572 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -565,6 +565,7 @@ arguments they contain. For example::
>>> with open('args.txt', 'w', encoding=sys.getfilesystemencoding()) as fp:
... fp.write('-f\nbar')
+ ...
>>> parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
>>> parser.add_argument('-f')
>>> parser.parse_args(['-f', 'foo', '@args.txt'])
diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst
index d0a1ed2b99e55d..fd47b0c24d8a16 100644
--- a/Doc/library/asyncio-eventloop.rst
+++ b/Doc/library/asyncio-eventloop.rst
@@ -33,7 +33,8 @@ an event loop:
Return the running event loop in the current OS thread.
- If there is no running event loop a :exc:`RuntimeError` is raised.
+ Raise a :exc:`RuntimeError` if there is no running event loop.
+
This function can only be called from a coroutine or a callback.
.. versionadded:: 3.7
@@ -42,27 +43,31 @@ an event loop:
Get the current event loop.
- If there is no current event loop set in the current OS thread,
- the OS thread is main, and :func:`set_event_loop` has not yet
- been called, asyncio will create a new event loop and set it as the
- current one.
+ When called from a coroutine or a callback (e.g. scheduled with
+ call_soon or similar API), this function will always return the
+ running event loop.
+
+ If there is no running event loop set, the function will return
+ the result of calling ``get_event_loop_policy().get_event_loop()``.
Because this function has rather complex behavior (especially
when custom event loop policies are in use), using the
:func:`get_running_loop` function is preferred to :func:`get_event_loop`
in coroutines and callbacks.
- Consider also using the :func:`asyncio.run` function instead of using
- lower level functions to manually create and close an event loop.
+ As noted above, consider using the higher-level :func:`asyncio.run` function,
+ instead of using these lower level functions to manually create and close an
+ event loop.
- .. deprecated:: 3.10
- Deprecation warning is emitted if there is no running event loop.
- In future Python releases, this function will be an alias of
- :func:`get_running_loop`.
+ .. note::
+ In Python versions 3.10.0--3.10.8 and 3.11.0 this function
+ (and other functions which used it implicitly) emitted a
+ :exc:`DeprecationWarning` if there was no running event loop, even if
+ the current loop was set.
.. function:: set_event_loop(loop)
- Set *loop* as a current event loop for the current OS thread.
+ Set *loop* as the current event loop for the current OS thread.
.. function:: new_event_loop()
diff --git a/Doc/library/asyncio-llapi-index.rst b/Doc/library/asyncio-llapi-index.rst
index b7ad888a7b67ab..9ce48a24444e66 100644
--- a/Doc/library/asyncio-llapi-index.rst
+++ b/Doc/library/asyncio-llapi-index.rst
@@ -19,7 +19,7 @@ Obtaining the Event Loop
- The **preferred** function to get the running event loop.
* - :func:`asyncio.get_event_loop`
- - Get an event loop instance (current or via the policy).
+ - Get an event loop instance (running or current via the current policy).
* - :func:`asyncio.set_event_loop`
- Set the event loop as current via the current policy.
diff --git a/Doc/library/asyncio-policy.rst b/Doc/library/asyncio-policy.rst
index 98c85015874689..ccd95244947534 100644
--- a/Doc/library/asyncio-policy.rst
+++ b/Doc/library/asyncio-policy.rst
@@ -116,6 +116,10 @@ asyncio ships with the following built-in policies:
On Windows, :class:`ProactorEventLoop` is now used by default.
+ .. versionchanged:: 3.12
+ :meth:`get_event_loop` now raises a :exc:`RuntimeError` if there is no
+ current event loop set.
+
.. class:: WindowsSelectorEventLoopPolicy
diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst
index d87e3c042c9977..c1ae8abb9abcd5 100644
--- a/Doc/library/asyncio-stream.rst
+++ b/Doc/library/asyncio-stream.rst
@@ -52,6 +52,7 @@ and work with streams:
limit=None, ssl=None, family=0, proto=0, \
flags=0, sock=None, local_addr=None, \
server_hostname=None, ssl_handshake_timeout=None, \
+ ssl_shutdown_timeout=None, \
happy_eyeballs_delay=None, interleave=None)
Establish a network connection and return a pair of
@@ -82,6 +83,9 @@ and work with streams:
.. versionchanged:: 3.10
Removed the *loop* parameter.
+ .. versionchanged:: 3.11
+ Added the *ssl_shutdown_timeout* parameter.
+
.. coroutinefunction:: start_server(client_connected_cb, host=None, \
port=None, *, limit=None, \
@@ -89,7 +93,7 @@ and work with streams:
flags=socket.AI_PASSIVE, sock=None, \
backlog=100, ssl=None, reuse_address=None, \
reuse_port=None, ssl_handshake_timeout=None, \
- start_serving=True)
+ ssl_shutdown_timeout=None, start_serving=True)
Start a socket server.
@@ -121,12 +125,15 @@ and work with streams:
.. versionchanged:: 3.10
Removed the *loop* parameter.
+ .. versionchanged:: 3.11
+ Added the *ssl_shutdown_timeout* parameter.
+
.. rubric:: Unix Sockets
.. coroutinefunction:: open_unix_connection(path=None, *, limit=None, \
ssl=None, sock=None, server_hostname=None, \
- ssl_handshake_timeout=None)
+ ssl_handshake_timeout=None, ssl_shutdown_timeout=None)
Establish a Unix socket connection and return a pair of
``(reader, writer)``.
@@ -150,10 +157,14 @@ and work with streams:
.. versionchanged:: 3.10
Removed the *loop* parameter.
+ .. versionchanged:: 3.11
+ Added the *ssl_shutdown_timeout* parameter.
+
.. coroutinefunction:: start_unix_server(client_connected_cb, path=None, \
*, limit=None, sock=None, backlog=100, ssl=None, \
- ssl_handshake_timeout=None, start_serving=True)
+ ssl_handshake_timeout=None, \
+ ssl_shutdown_timeout=None, start_serving=True)
Start a Unix socket server.
@@ -176,6 +187,9 @@ and work with streams:
.. versionchanged:: 3.10
Removed the *loop* parameter.
+ .. versionchanged:: 3.11
+ Added the *ssl_shutdown_timeout* parameter.
+
StreamReader
============
diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst
index ae5a1598f84b44..32df99869eb530 100644
--- a/Doc/library/bz2.rst
+++ b/Doc/library/bz2.rst
@@ -320,9 +320,11 @@ Writing and reading a bzip2-compressed file in binary mode:
>>> with bz2.open("myfile.bz2", "wb") as f:
... # Write compressed data to file
... unused = f.write(data)
+ ...
>>> with bz2.open("myfile.bz2", "rb") as f:
... # Decompress data from file
... content = f.read()
+ ...
>>> content == data # Check equality to original object after round-trip
True
diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst
index 53b4b69f84b7bf..2cffc2300a2298 100644
--- a/Doc/library/collections.rst
+++ b/Doc/library/collections.rst
@@ -229,6 +229,7 @@ For example::
>>> cnt = Counter()
>>> for word in ['red', 'blue', 'red', 'green', 'blue', 'blue']:
... cnt[word] += 1
+ ...
>>> cnt
Counter({'blue': 3, 'red': 2, 'green': 1})
@@ -818,6 +819,7 @@ zero):
>>> def constant_factory(value):
... return lambda: value
+ ...
>>> d = defaultdict(constant_factory(''))
>>> d.update(name='John', action='ran')
>>> '%(name)s %(action)s to %(object)s' % d
diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst
index 847299649d1efd..32c524a7348719 100644
--- a/Doc/library/dataclasses.rst
+++ b/Doc/library/dataclasses.rst
@@ -79,7 +79,8 @@ Module contents
class C:
...
- @dataclass(init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, match_args=True, kw_only=False, slots=False, weakref_slot=False)
+ @dataclass(init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False,
+ match_args=True, kw_only=False, slots=False, weakref_slot=False)
class C:
...
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index f7e2bb3f3c6de3..8bfed19d3fd2c6 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -765,6 +765,7 @@ Example of counting days to an event::
>>> my_birthday = date(today.year, 6, 24)
>>> if my_birthday < today:
... my_birthday = my_birthday.replace(year=today.year + 1)
+ ...
>>> my_birthday
datetime.date(2008, 6, 24)
>>> time_to_birthday = abs(my_birthday - today)
@@ -2601,7 +2602,7 @@ Notes:
(9)
When used with the :meth:`strptime` method, the leading zero is optional
- for formats ``%d``, ``%m``, ``%H``, ``%I``, ``%M``, ``%S``, ``%J``, ``%U``,
+ for formats ``%d``, ``%m``, ``%H``, ``%I``, ``%M``, ``%S``, ``%j``, ``%U``,
``%W``, and ``%V``. Format ``%y`` does require a leading zero.
.. rubric:: Footnotes
diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst
index 260108136df7f1..fec9b86864c578 100644
--- a/Doc/library/decimal.rst
+++ b/Doc/library/decimal.rst
@@ -2057,6 +2057,7 @@ to handle the :meth:`quantize` step:
>>> def mul(x, y, fp=TWOPLACES):
... return (x * y).quantize(fp)
+ ...
>>> def div(x, y, fp=TWOPLACES):
... return (x / y).quantize(fp)
diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst
index c106d5a3383a5e..d6e4dca0860671 100644
--- a/Doc/library/doctest.rst
+++ b/Doc/library/doctest.rst
@@ -351,6 +351,7 @@ The fine print:
>>> def f(x):
... r'''Backslashes in a raw docstring: m\n'''
+ ...
>>> print(f.__doc__)
Backslashes in a raw docstring: m\n
@@ -360,6 +361,7 @@ The fine print:
>>> def f(x):
... '''Backslashes in a raw docstring: m\\n'''
+ ...
>>> print(f.__doc__)
Backslashes in a raw docstring: m\n
@@ -1055,7 +1057,7 @@ from text files and modules with doctests:
from a text file using :func:`DocFileSuite`.
-.. function:: DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, setUp=None, tearDown=None, checker=None)
+.. function:: DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, setUp=None, tearDown=None, optionflags=0, checker=None)
Convert doctest tests for a module to a :class:`unittest.TestSuite`.
diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst
index bf53b9520fc723..2439dee676c9b0 100644
--- a/Doc/library/email.policy.rst
+++ b/Doc/library/email.policy.rst
@@ -97,6 +97,7 @@ file on disk and pass it to the system ``sendmail`` program on a Unix system:
>>> from subprocess import Popen, PIPE
>>> with open('mymsg.txt', 'rb') as f:
... msg = message_from_binary_file(f, policy=policy.default)
+ ...
>>> p = Popen(['sendmail', msg['To'].addresses[0]], stdin=PIPE)
>>> g = BytesGenerator(p.stdin, policy=msg.policy.clone(linesep='\r\n'))
>>> g.flatten(msg)
diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst
index 74d9e67327629e..25a6e1f0b61677 100644
--- a/Doc/library/enum.rst
+++ b/Doc/library/enum.rst
@@ -194,7 +194,7 @@ Data Types
.. method:: EnumType.__getitem__(cls, name)
- Returns the Enum member in *cls* matching *name*, or raises an :exc:`KeyError`::
+ Returns the Enum member in *cls* matching *name*, or raises a :exc:`KeyError`::
>>> Color['BLUE']
@@ -241,7 +241,7 @@ Data Types
.. note:: Enum member values
- Member values can be anything: :class:`int`, :class:`str`, etc.. If
+ Member values can be anything: :class:`int`, :class:`str`, etc. If
the exact value is unimportant you may use :class:`auto` instances and an
appropriate value will be chosen for you. See :class:`auto` for the
details.
@@ -255,7 +255,7 @@ Data Types
names will also be removed from the completed enumeration. See
:ref:`TimePeriod ` for an example.
- .. method:: Enum.__call__(cls, value, names=None, \*, module=None, qualname=None, type=None, start=1, boundary=None)
+ .. method:: Enum.__call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)
This method is called in two different ways:
@@ -272,8 +272,8 @@ Data Types
:module: The name of the module the new Enum is created in.
:qualname: The actual location in the module where this Enum can be found.
:type: A mix-in type for the new Enum.
- :start: The first integer value for the Enum (used by :class:`auto`)
- :boundary: How to handle out-of-range values from bit operations (:class:`Flag` only)
+ :start: The first integer value for the Enum (used by :class:`auto`).
+ :boundary: How to handle out-of-range values from bit operations (:class:`Flag` only).
.. method:: Enum.__dir__(self)
@@ -292,6 +292,7 @@ Data Types
... @classmethod
... def today(cls):
... print('today is %s' % cls(date.today().isoweekday()).name)
+ ...
>>> dir(Weekday.SATURDAY)
['__class__', '__doc__', '__eq__', '__hash__', '__module__', 'name', 'today', 'value']
@@ -309,13 +310,14 @@ Data Types
>>> class PowersOfThree(Enum):
... @staticmethod
... def _generate_next_value_(name, start, count, last_values):
- ... return (count + 1) * 3
+ ... return 3 ** (count + 1)
... FIRST = auto()
... SECOND = auto()
+ ...
>>> PowersOfThree.SECOND.value
- 6
+ 9
- .. method:: Enum.__init_subclass__(cls, \**kwds)
+ .. method:: Enum.__init_subclass__(cls, **kwds)
A *classmethod* that is used to further configure subsequent subclasses.
By default, does nothing.
@@ -336,6 +338,7 @@ Data Types
... if member.value == value:
... return member
... return None
+ ...
>>> Build.DEBUG.value
'debug'
>>> Build('deBUG')
@@ -353,6 +356,7 @@ Data Types
... def __repr__(self):
... cls_name = self.__class__.__name__
... return f'{cls_name}.{self.name}'
+ ...
>>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}"
(OtherStyle.ALTERNATE, 'OtherStyle.ALTERNATE', 'OtherStyle.ALTERNATE')
@@ -367,13 +371,14 @@ Data Types
... SOMETHING_ELSE = auto()
... def __str__(self):
... return f'{self.name}'
+ ...
>>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}"
(, 'ALTERNATE', 'ALTERNATE')
.. method:: Enum.__format__(self)
Returns the string used for *format()* and *f-string* calls. By default,
- returns :meth:`__str__` returns, but can be overridden::
+ returns the return value of :meth:`__str__`, but can be overridden::
>>> class OtherStyle(Enum):
... ALTERNATE = auto()
@@ -381,6 +386,7 @@ Data Types
... SOMETHING_ELSE = auto()
... def __format__(self, spec):
... return f'{self.name}'
+ ...
>>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}"
(, 'OtherStyle.ALTERNATE', 'ALTERNATE')
@@ -389,6 +395,8 @@ Data Types
Using :class:`auto` with :class:`Enum` results in integers of increasing value,
starting with ``1``.
+ .. versionchanged:: 3.12 Added :ref:`enum-dataclass-support`
+
.. class:: IntEnum
@@ -401,6 +409,7 @@ Data Types
... ONE = 1
... TWO = 2
... THREE = 3
+ ...
>>> Numbers.THREE
>>> Numbers.ONE + Numbers.TWO
@@ -461,6 +470,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> purple = Color.RED | Color.BLUE
>>> white = Color.RED | Color.GREEN | Color.BLUE
>>> Color.GREEN in purple
@@ -552,11 +562,11 @@ Data Types
Using :class:`auto` with :class:`Flag` results in integers that are powers
of two, starting with ``1``.
- .. versionchanged:: 3.11 The *repr()* of zero-valued flags has changed. It
+ .. versionchanged:: 3.11 The *repr()* of zero-valued flags has changed. It
is now::
- >>> Color(0) # doctest: +SKIP
-
+ >>> Color(0) # doctest: +SKIP
+
.. class:: IntFlag
@@ -568,6 +578,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> Color.RED & 2
>>> Color.RED | 2
@@ -600,7 +611,7 @@ Data Types
*replacement of existing constants* use-case. :meth:`~object.__format__` was
already :meth:`!int.__format__` for that same reason.
- Inversion of a :class:`!IntFlag` now returns a positive value that is the
+ Inversion of an :class:`!IntFlag` now returns a positive value that is the
union of all flags not in the given flag, rather than a negative value.
This matches the existing :class:`Flag` behavior.
@@ -612,7 +623,7 @@ Data Types
* :meth:`!int.__str__` for :class:`IntEnum` and :class:`IntFlag`
* :meth:`!str.__str__` for :class:`StrEnum`
- Inherit from :class:`!ReprEnum` to keep the :class:`str() / :func:`format`
+ Inherit from :class:`!ReprEnum` to keep the :class:`str() <str>` / :func:`format`
of the mixed-in data type instead of using the
:class:`Enum`-default :meth:`str() <Enum.__str__>`.
@@ -658,7 +669,7 @@ Data Types
.. attribute:: NAMED_FLAGS
Ensure that any flag groups/masks contain only named flags -- useful when
- values are specified instead of being generated by :func:`auto`
+ values are specified instead of being generated by :func:`auto`::
>>> from enum import Flag, verify, NAMED_FLAGS
>>> @verify(NAMED_FLAGS)
@@ -693,6 +704,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> StrictFlag(2**2 + 2**4)
Traceback (most recent call last):
...
@@ -710,6 +722,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> ConformFlag(2**2 + 2**4)
@@ -723,6 +736,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> EjectFlag(2**2 + 2**4)
20
@@ -736,6 +750,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> KeepFlag(2**2 + 2**4)
@@ -804,6 +819,11 @@ Utilities and Decorators
* ``THREE = [auto(), -3]`` will *not* work (``, -3`` is used to
create the ``THREE`` enum member)
+ .. versionchanged:: 3.11.1
+
+ In prior versions, ``auto()`` had to be the only thing
+ on the assignment line to work properly.
+
``_generate_next_value_`` can be overridden to customize the values used by
*auto*.
@@ -885,23 +905,23 @@ Notes
:class:`IntEnum`, :class:`StrEnum`, and :class:`IntFlag`
- These three enum types are designed to be drop-in replacements for existing
- integer- and string-based values; as such, they have extra limitations:
+ These three enum types are designed to be drop-in replacements for existing
+ integer- and string-based values; as such, they have extra limitations:
- - ``__str__`` uses the value and not the name of the enum member
+ - ``__str__`` uses the value and not the name of the enum member
- - ``__format__``, because it uses ``__str__``, will also use the value of
- the enum member instead of its name
+ - ``__format__``, because it uses ``__str__``, will also use the value of
+ the enum member instead of its name
- If you do not need/want those limitations, you can either create your own
- base class by mixing in the ``int`` or ``str`` type yourself::
+ If you do not need/want those limitations, you can either create your own
+ base class by mixing in the ``int`` or ``str`` type yourself::
- >>> from enum import Enum
- >>> class MyIntEnum(int, Enum):
- ... pass
+ >>> from enum import Enum
+ >>> class MyIntEnum(int, Enum):
+ ... pass
or you can reassign the appropriate :meth:`str`, etc., in your enum::
- >>> from enum import IntEnum
- >>> class MyIntEnum(IntEnum):
- ... __str__ = IntEnum.__str__
+ >>> from enum import IntEnum
+ >>> class MyIntEnum(IntEnum):
+ ... __str__ = IntEnum.__str__
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
index 110e7e5d7fb9a7..2110990d188973 100644
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -462,6 +462,7 @@ are always available. They are listed here in alphabetical order.
>>> class Shape:
... def __dir__(self):
... return ['area', 'perimeter', 'location']
+ ...
>>> s = Shape()
>>> dir(s)
['area', 'location', 'perimeter']
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
index 8e47312fe77bf5..f8d10c0c295c7a 100644
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -497,6 +497,7 @@ update the hash:
>>> h = blake2b()
>>> for item in items:
... h.update(item)
+ ...
>>> h.hexdigest()
'6ff843ba685842aa82031d3f53c48b66326df7639a63d128974c5c14f31a0f33343a8c65551134ed1ae0f2b0dd2bb495dc81039e3eeb0aa1bb0388bbeac29183'
diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst
index 81b6bf5373b495..3290b9beab3ed9 100644
--- a/Doc/library/http.server.rst
+++ b/Doc/library/http.server.rst
@@ -512,3 +512,12 @@ Security Considerations
:class:`SimpleHTTPRequestHandler` will follow symbolic links when handling
requests, this makes it possible for files outside of the specified directory
to be served.
+
+Earlier versions of Python did not scrub control characters from the
+log messages emitted to stderr from ``python -m http.server`` or the
+default :class:`BaseHTTPRequestHandler` ``.log_message``
+implementation. This could allow remote clients connecting to your
+server to send nefarious control codes to your terminal.
+
+.. versionadded:: 3.12
+ Control characters are scrubbed in stderr logs.
diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst
index 9cb7a6f94e49cd..6705577551dcc5 100644
--- a/Doc/library/inspect.rst
+++ b/Doc/library/inspect.rst
@@ -715,6 +715,7 @@ function.
>>> def test(a, b):
... pass
+ ...
>>> sig = signature(test)
>>> new_sig = sig.replace(return_annotation="new return anno")
>>> str(new_sig)
@@ -1054,6 +1055,7 @@ Classes and functions
>>> from inspect import getcallargs
>>> def f(a, b=1, *pos, **named):
... pass
+ ...
>>> getcallargs(f, 1, 2, 3) == {'a': 1, 'named': {}, 'b': 2, 'pos': (3,)}
True
>>> getcallargs(f, a=2, x=4) == {'a': 2, 'named': {'x': 4}, 'b': 1, 'pos': ()}
diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 0b5978505a9672..624d2430ac20d7 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -52,7 +52,7 @@ Iterator Arguments Results
Iterator Arguments Results Example
============================ ============================ ================================================= =============================================================
:func:`accumulate` p [,func] p0, p0+p1, p0+p1+p2, ... ``accumulate([1,2,3,4,5]) --> 1 3 6 10 15``
-:func:`batched` p, n [p0, p1, ..., p_n-1], ... ``batched('ABCDEFG', n=3) --> ABC DEF G``
+:func:`batched` p, n (p0, p1, ..., p_n-1), ... ``batched('ABCDEFG', n=3) --> ABC DEF G``
:func:`chain` p, q, ... p0, p1, ... plast, q0, q1, ... ``chain('ABC', 'DEF') --> A B C D E F``
:func:`chain.from_iterable` iterable p0, p1, ... plast, q0, q1, ... ``chain.from_iterable(['ABC', 'DEF']) --> A B C D E F``
:func:`compress` data, selectors (d[0] if s[0]), (d[1] if s[1]), ... ``compress('ABCDEF', [1,0,1,0,1,1]) --> A C E F``
@@ -166,11 +166,11 @@ loops that truncate the stream.
.. function:: batched(iterable, n)
- Batch data from the *iterable* into lists of length *n*. The last
+ Batch data from the *iterable* into tuples of length *n*. The last
batch may be shorter than *n*.
- Loops over the input iterable and accumulates data into lists up to
- size *n*. The input is consumed lazily, just enough to fill a list.
+ Loops over the input iterable and accumulates data into tuples up to
+ size *n*. The input is consumed lazily, just enough to fill a batch.
The result is yielded as soon as the batch is full or when the input
iterable is exhausted:
@@ -179,14 +179,14 @@ loops that truncate the stream.
>>> flattened_data = ['roses', 'red', 'violets', 'blue', 'sugar', 'sweet']
>>> unflattened = list(batched(flattened_data, 2))
>>> unflattened
- [['roses', 'red'], ['violets', 'blue'], ['sugar', 'sweet']]
+ [('roses', 'red'), ('violets', 'blue'), ('sugar', 'sweet')]
>>> for batch in batched('ABCDEFG', 3):
... print(batch)
...
- ['A', 'B', 'C']
- ['D', 'E', 'F']
- ['G']
+ ('A', 'B', 'C')
+ ('D', 'E', 'F')
+ ('G',)
Roughly equivalent to::
@@ -195,7 +195,7 @@ loops that truncate the stream.
if n < 1:
raise ValueError('n must be at least one')
it = iter(iterable)
- while (batch := list(islice(it, n))):
+ while (batch := tuple(islice(it, n))):
yield batch
.. versionadded:: 3.12
diff --git a/Doc/library/profile.rst b/Doc/library/profile.rst
index 2d95096f4cb83a..c2189e02656c7a 100644
--- a/Doc/library/profile.rst
+++ b/Doc/library/profile.rst
@@ -274,7 +274,7 @@ functions:
with cProfile.Profile() as pr:
# ... do something ...
- pr.print_stats()
+ pr.print_stats()
.. versionchanged:: 3.8
Added context manager support.
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
index e6e242320fd878..f7d46586cf7570 100644
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -973,6 +973,7 @@ Functions
>>> def dashrepl(matchobj):
... if matchobj.group(0) == '-': return ' '
... else: return '-'
+ ...
>>> re.sub('-{1,2}', dashrepl, 'pro----gram-files')
'pro--gram files'
>>> re.sub(r'\sAND\s', ' & ', 'Baked Beans And Spam', flags=re.IGNORECASE)
@@ -1672,6 +1673,7 @@ in each word of a sentence except for the first and last characters::
... inner_word = list(m.group(2))
... random.shuffle(inner_word)
... return m.group(1) + "".join(inner_word) + m.group(3)
+ ...
>>> text = "Professor Abdolmalek, please report your absences promptly."
>>> re.sub(r"(\w)(\w+)(\w)", repl, text)
'Poefsrosr Aealmlobdk, pslaee reorpt your abnseces plmrptoy.'
diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst
index c946407ea1d83f..de2e1aa3868bb3 100644
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -428,7 +428,14 @@ Constants
.. versionchanged:: 3.12
Added ``SO_RTABLE`` and ``SO_USER_COOKIE``. On OpenBSD
and FreeBSD respectively those constants can be used in the same way that
- ``SO_MARK`` is used on Linux.
+ ``SO_MARK`` is used on Linux. Also added missing TCP socket options from
+ Linux: ``TCP_MD5SIG``, ``TCP_THIN_LINEAR_TIMEOUTS``, ``TCP_THIN_DUPACK``,
+ ``TCP_REPAIR``, ``TCP_REPAIR_QUEUE``, ``TCP_QUEUE_SEQ``,
+ ``TCP_REPAIR_OPTIONS``, ``TCP_TIMESTAMP``, ``TCP_CC_INFO``,
+ ``TCP_SAVE_SYN``, ``TCP_SAVED_SYN``, ``TCP_REPAIR_WINDOW``,
+ ``TCP_FASTOPEN_CONNECT``, ``TCP_ULP``, ``TCP_MD5SIG_EXT``,
+ ``TCP_FASTOPEN_KEY``, ``TCP_FASTOPEN_NO_COOKIE``,
+ ``TCP_ZEROCOPY_RECEIVE``, ``TCP_INQ``, ``TCP_TX_DELAY``.
.. data:: AF_CAN
PF_CAN
diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst
index 960f2966afe1f2..3622864a4b06f9 100644
--- a/Doc/library/sqlite3.rst
+++ b/Doc/library/sqlite3.rst
@@ -397,6 +397,7 @@ Module functions
>>> con = sqlite3.connect(":memory:")
>>> def evil_trace(stmt):
... 5/0
+ ...
>>> con.set_trace_callback(evil_trace)
>>> def debug(unraisable):
... print(f"{unraisable.exc_value!r} in callback {unraisable.object.__name__}")
@@ -1929,12 +1930,16 @@ How to use placeholders to bind values in SQL queries
SQL operations usually need to use values from Python variables. However,
beware of using Python's string operations to assemble queries, as they
-are vulnerable to `SQL injection attacks`_ (see the `xkcd webcomic
-`_ for a humorous example of what can go wrong)::
-
- # Never do this -- insecure!
- symbol = 'RHAT'
- cur.execute("SELECT * FROM stocks WHERE symbol = '%s'" % symbol)
+are vulnerable to `SQL injection attacks`_. For example, an attacker can simply
+close the single quote and inject ``OR TRUE`` to select all rows::
+
+ >>> # Never do this -- insecure!
+ >>> symbol = input()
+ ' OR TRUE; --
+ >>> sql = "SELECT * FROM stocks WHERE symbol = '%s'" % symbol
+ >>> print(sql)
+ SELECT * FROM stocks WHERE symbol = '' OR TRUE; --'
+ >>> cur.execute(sql)
Instead, use the DB-API's parameter substitution. To insert a variable into a
query string, use a placeholder in the string, and substitute the actual values
diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst
index 88a887960edb58..f934b0e0319dca 100644
--- a/Doc/library/statistics.rst
+++ b/Doc/library/statistics.rst
@@ -996,6 +996,7 @@ probability that the Python room will stay within its capacity limits?
>>> seed(8675309)
>>> def trial():
... return choices(('Python', 'Ruby'), (p, q), k=n).count('Python')
+ ...
>>> mean(trial() <= k for i in range(10_000))
0.8398
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index 785b76a11f2f38..c785336944f50a 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -4459,6 +4459,7 @@ can be used interchangeably to index the same dictionary entry.
>>> class Counter(dict):
... def __missing__(self, key):
... return 0
+ ...
>>> c = Counter()
>>> c['red']
0
@@ -4716,6 +4717,7 @@ An example of dictionary view usage::
>>> n = 0
>>> for val in values:
... n += val
+ ...
>>> print(n)
504
@@ -4741,7 +4743,7 @@ An example of dictionary view usage::
>>> # get back a read-only proxy for the original dictionary
>>> values.mapping
- mappingproxy({'eggs': 2, 'sausage': 1, 'bacon': 1, 'spam': 500})
+ mappingproxy({'bacon': 1, 'spam': 500})
>>> values.mapping['spam']
500
@@ -5501,7 +5503,7 @@ When an operation would exceed the limit, a :exc:`ValueError` is raised:
>>> _ = int('2' * 5432)
Traceback (most recent call last):
...
- ValueError: Exceeds the limit (4300) for integer string conversion: value has 5432 digits; use sys.set_int_max_str_digits() to increase the limit.
+ ValueError: Exceeds the limit (4300 digits) for integer string conversion: value has 5432 digits; use sys.set_int_max_str_digits() to increase the limit.
>>> i = int('2' * 4300)
>>> len(str(i))
4300
@@ -5509,7 +5511,7 @@ When an operation would exceed the limit, a :exc:`ValueError` is raised:
>>> len(str(i_squared))
Traceback (most recent call last):
...
- ValueError: Exceeds the limit (4300) for integer string conversion: value has 8599 digits; use sys.set_int_max_str_digits() to increase the limit.
+ ValueError: Exceeds the limit (4300 digits) for integer string conversion: value has 8599 digits; use sys.set_int_max_str_digits() to increase the limit.
>>> len(hex(i_squared))
7144
>>> assert int(hex(i_squared), base=16) == i*i # Hexadecimal is unlimited.
diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst
index 024988777030f8..839c2c015b49ae 100644
--- a/Doc/library/sysconfig.rst
+++ b/Doc/library/sysconfig.rst
@@ -121,7 +121,7 @@ identifier. Python currently uses eight paths:
Return the default scheme name for the current platform.
- .. versionchanged:: 3.10
+ .. versionadded:: 3.10
This function was previously named ``_get_default_scheme()`` and
considered an implementation detail.
diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst
index 94c9cb11f02d6d..356f919a1897b2 100644
--- a/Doc/library/typing.rst
+++ b/Doc/library/typing.rst
@@ -2575,6 +2575,10 @@ Functions and decorators
assumed to be True or False if it is omitted by the caller.
* ``kw_only_default`` indicates whether the ``kw_only`` parameter is
assumed to be True or False if it is omitted by the caller.
+ * ``frozen_default`` indicates whether the ``frozen`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+
+ .. versionadded:: 3.12
* ``field_specifiers`` specifies a static list of supported classes
or functions that describe fields, similar to ``dataclasses.field()``.
* Arbitrary other keyword arguments are accepted in order to allow for
diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst
index b768557e6075f6..e009f303fef317 100644
--- a/Doc/library/unittest.mock.rst
+++ b/Doc/library/unittest.mock.rst
@@ -1604,6 +1604,7 @@ decorator:
>>> @patch.dict(foo, {'newkey': 'newvalue'})
... def test():
... assert foo == {'newkey': 'newvalue'}
+ ...
>>> test()
>>> assert foo == {}
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index adc6cd339ac157..2a41096de006b8 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -497,76 +497,68 @@ subclass which installs setuptools and pip into a created virtual environment::
url = 'https://bootstrap.pypa.io/get-pip.py'
self.install_script(context, 'pip', url)
+
def main(args=None):
- compatible = True
- if sys.version_info < (3, 3):
- compatible = False
- elif not hasattr(sys, 'base_prefix'):
- compatible = False
- if not compatible:
- raise ValueError('This script is only for use with '
- 'Python 3.3 or later')
+ import argparse
+
+ parser = argparse.ArgumentParser(prog=__name__,
+ description='Creates virtual Python '
+ 'environments in one or '
+ 'more target '
+ 'directories.')
+ parser.add_argument('dirs', metavar='ENV_DIR', nargs='+',
+ help='A directory in which to create the '
+ 'virtual environment.')
+ parser.add_argument('--no-setuptools', default=False,
+ action='store_true', dest='nodist',
+ help="Don't install setuptools or pip in the "
+ "virtual environment.")
+ parser.add_argument('--no-pip', default=False,
+ action='store_true', dest='nopip',
+ help="Don't install pip in the virtual "
+ "environment.")
+ parser.add_argument('--system-site-packages', default=False,
+ action='store_true', dest='system_site',
+ help='Give the virtual environment access to the '
+ 'system site-packages dir.')
+ if os.name == 'nt':
+ use_symlinks = False
else:
- import argparse
-
- parser = argparse.ArgumentParser(prog=__name__,
- description='Creates virtual Python '
- 'environments in one or '
- 'more target '
- 'directories.')
- parser.add_argument('dirs', metavar='ENV_DIR', nargs='+',
- help='A directory in which to create the '
- 'virtual environment.')
- parser.add_argument('--no-setuptools', default=False,
- action='store_true', dest='nodist',
- help="Don't install setuptools or pip in the "
- "virtual environment.")
- parser.add_argument('--no-pip', default=False,
- action='store_true', dest='nopip',
- help="Don't install pip in the virtual "
- "environment.")
- parser.add_argument('--system-site-packages', default=False,
- action='store_true', dest='system_site',
- help='Give the virtual environment access to the '
- 'system site-packages dir.')
- if os.name == 'nt':
- use_symlinks = False
- else:
- use_symlinks = True
- parser.add_argument('--symlinks', default=use_symlinks,
- action='store_true', dest='symlinks',
- help='Try to use symlinks rather than copies, '
- 'when symlinks are not the default for '
- 'the platform.')
- parser.add_argument('--clear', default=False, action='store_true',
- dest='clear', help='Delete the contents of the '
- 'virtual environment '
- 'directory if it already '
- 'exists, before virtual '
- 'environment creation.')
- parser.add_argument('--upgrade', default=False, action='store_true',
- dest='upgrade', help='Upgrade the virtual '
- 'environment directory to '
- 'use this version of '
- 'Python, assuming Python '
- 'has been upgraded '
- 'in-place.')
- parser.add_argument('--verbose', default=False, action='store_true',
- dest='verbose', help='Display the output '
- 'from the scripts which '
- 'install setuptools and pip.')
- options = parser.parse_args(args)
- if options.upgrade and options.clear:
- raise ValueError('you cannot supply --upgrade and --clear together.')
- builder = ExtendedEnvBuilder(system_site_packages=options.system_site,
- clear=options.clear,
- symlinks=options.symlinks,
- upgrade=options.upgrade,
- nodist=options.nodist,
- nopip=options.nopip,
- verbose=options.verbose)
- for d in options.dirs:
- builder.create(d)
+ use_symlinks = True
+ parser.add_argument('--symlinks', default=use_symlinks,
+ action='store_true', dest='symlinks',
+ help='Try to use symlinks rather than copies, '
+ 'when symlinks are not the default for '
+ 'the platform.')
+ parser.add_argument('--clear', default=False, action='store_true',
+ dest='clear', help='Delete the contents of the '
+ 'virtual environment '
+ 'directory if it already '
+ 'exists, before virtual '
+ 'environment creation.')
+ parser.add_argument('--upgrade', default=False, action='store_true',
+ dest='upgrade', help='Upgrade the virtual '
+ 'environment directory to '
+ 'use this version of '
+ 'Python, assuming Python '
+ 'has been upgraded '
+ 'in-place.')
+ parser.add_argument('--verbose', default=False, action='store_true',
+ dest='verbose', help='Display the output '
+ 'from the scripts which '
+ 'install setuptools and pip.')
+ options = parser.parse_args(args)
+ if options.upgrade and options.clear:
+ raise ValueError('you cannot supply --upgrade and --clear together.')
+ builder = ExtendedEnvBuilder(system_site_packages=options.system_site,
+ clear=options.clear,
+ symlinks=options.symlinks,
+ upgrade=options.upgrade,
+ nodist=options.nodist,
+ nopip=options.nopip,
+ verbose=options.verbose)
+ for d in options.dirs:
+ builder.create(d)
if __name__ == '__main__':
rc = 1
diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst
index 2fe0d2e082fb3a..876de29b17ca3c 100644
--- a/Doc/library/xml.etree.elementtree.rst
+++ b/Doc/library/xml.etree.elementtree.rst
@@ -1212,6 +1212,7 @@ Example of changing the attribute "target" of every link in first paragraph::
[, ]
>>> for i in links: # Iterates through all found links
... i.attrib["target"] = "blank"
+ ...
>>> tree.write("output.xhtml")
.. _elementtree-qname-objects:
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index 4dd9fa961a8d98..82709dbc92496d 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -672,6 +672,7 @@ The :class:`PyZipFile` constructor takes the same parameters as the
>>> def notests(s):
... fn = os.path.basename(s)
... return (not (fn == 'test' or fn.startswith('test_')))
+ ...
>>> zf.writepy('myprog', filterfunc=notests)
The :meth:`writepy` method makes archives with file names like
diff --git a/Doc/requirements.txt b/Doc/requirements.txt
index 958665db69e227..134f39d6d7b3d4 100644
--- a/Doc/requirements.txt
+++ b/Doc/requirements.txt
@@ -8,6 +8,7 @@ sphinx==4.5.0
blurb
sphinx-lint==0.6.7
+sphinxext-opengraph>=0.7.1
# The theme used by the documentation is stored separately, so we need
# to install that as well.
diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html
index 98ccf4224804b2..460161cd320223 100644
--- a/Doc/tools/templates/layout.html
+++ b/Doc/tools/templates/layout.html
@@ -8,6 +8,19 @@
{% trans %} Python documentation for the current stable release{% endtrans %}.
{%- endif %}
+
+{%- if is_deployment_preview %}
+
+{%- endif %}
{% endblock %}
{% block rootrellink %}
diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst
index 9ecbf8b87efbf1..0e5a9402bc50e3 100644
--- a/Doc/tutorial/classes.rst
+++ b/Doc/tutorial/classes.rst
@@ -119,12 +119,12 @@ directly accessible:
* the innermost scope, which is searched first, contains the local names
* the scopes of any enclosing functions, which are searched starting with the
- nearest enclosing scope, contains non-local, but also non-global names
+ nearest enclosing scope, contain non-local, but also non-global names
* the next-to-last scope contains the current module's global names
* the outermost scope (searched last) is the namespace containing built-in names
If a name is declared global, then all references and assignments go directly to
-the middle scope containing the module's global names. To rebind variables
+the next-to-last scope containing the module's global names. To rebind variables
found outside of the innermost scope, the :keyword:`nonlocal` statement can be
used; if not declared nonlocal, those variables are read-only (an attempt to
write to such a variable will simply create a *new* local variable in the
diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst
index 0922972f9bf1dc..db4bf7412292bc 100644
--- a/Doc/using/configure.rst
+++ b/Doc/using/configure.rst
@@ -2,6 +2,47 @@
Configure Python
****************
+Build Requirements
+==================
+
+Features required to build CPython:
+
+* A `C11 `_ compiler. `Optional C11
+ features
+ `_
+ are not required.
+
+* Support for `IEEE 754 `_ floating
+ point numbers and `floating point Not-a-Number (NaN)
+ `_.
+
+* Support for threads.
+
+* OpenSSL 1.1.1 or newer for the :mod:`ssl` and :mod:`hashlib` modules.
+
+* On Windows, Microsoft Visual Studio 2017 or later is required.
+
+.. versionchanged:: 3.11
+ C11 compiler, IEEE 754 and NaN support are now required.
+ On Windows, Visual Studio 2017 or later is required.
+
+.. versionchanged:: 3.10
+ OpenSSL 1.1.1 is now required.
+
+.. versionchanged:: 3.7
+ Thread support and OpenSSL 1.0.2 are now required.
+
+.. versionchanged:: 3.6
+ Selected C99 features are now required, like ``<stdint.h>`` and ``static
+ inline`` functions.
+
+.. versionchanged:: 3.5
+ On Windows, Visual Studio 2015 or later is required.
+
+See also :pep:`7` "Style Guide for C Code" and :pep:`11` "CPython platform
+support".
+
+
.. _configure-options:
Configure Options
@@ -93,6 +134,12 @@ General Options
See :envvar:`PYTHONCOERCECLOCALE` and the :pep:`538`.
+.. cmdoption:: --without-freelists
+
+ Disable all freelists except the empty tuple singleton.
+
+ .. versionadded:: 3.11
+
.. cmdoption:: --with-platlibdir=DIRNAME
Python library directory name (default is ``lib``).
diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst
index 276ab63b97f8a9..810a2cd2537c34 100644
--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -1331,6 +1331,7 @@ changes, or look through the Subversion logs for all the details.
>>> from inspect import getcallargs
>>> def f(a, b=1, *pos, **named):
... pass
+ ...
>>> getcallargs(f, 1, 2, 3)
{'a': 1, 'b': 2, 'pos': (3,), 'named': {}}
>>> getcallargs(f, a=2, x=4)
diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst
index 6eb90df89cac0e..7931988ed0b04d 100644
--- a/Doc/whatsnew/3.11.rst
+++ b/Doc/whatsnew/3.11.rst
@@ -2131,7 +2131,7 @@ Build Changes
(Contributed by Dong-hee Na and Brett Holman in :issue:`44340`.)
* Freelists for object structs can now be disabled. A new :program:`configure`
- option :option:`!--without-freelists` can be used to disable all freelists
+ option :option:`--without-freelists` can be used to disable all freelists
except empty tuple singleton.
(Contributed by Christian Heimes in :issue:`45522`.)
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index c0f98b59ccaf0f..73dc462f0b3303 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -178,6 +178,10 @@ Other Language Changes
In a future Python version they will be eventually a :exc:`SyntaxError`.
(Contributed by Victor Stinner in :gh:`98401`.)
+* All builtin and extension callables expecting boolean parameters now accept
+ arguments of any type instead of just :class:`bool` and :class:`int`.
+ (Contributed by Serhiy Storchaka in :gh:`60203`.)
+
New Modules
===========
@@ -682,6 +686,18 @@ Changes in the Python API
around process-global resources, which are best managed from the main interpreter.
(Contributed by Dong-hee Na in :gh:`99127`.)
+* :func:`asyncio.get_event_loop` and many other :mod:`asyncio` functions like
+ :func:`~asyncio.ensure_future`, :func:`~asyncio.shield` or
+ :func:`~asyncio.gather`, and also the
+ :meth:`~asyncio.BaseDefaultEventLoopPolicy.get_event_loop` method of
+ :class:`~asyncio.BaseDefaultEventLoopPolicy` now raise a :exc:`RuntimeError`
+ if called when there is no running event loop and the current event loop was
+ not set.
+ Previously they implicitly created and set a new current event loop.
+ :exc:`DeprecationWarning` is no longer emitted if there is no running
+ event loop but the current event loop is set in the policy.
+ (Contributed by Serhiy Storchaka in :gh:`93453`.)
+
Build Changes
=============
@@ -773,6 +789,10 @@ New Features
callbacks to receive notification on changes to a type.
(Contributed by Carl Meyer in :gh:`91051`.)
+* Added :c:func:`PyCode_AddWatcher` and :c:func:`PyCode_ClearWatcher`
+ APIs to register callbacks to receive notification on creation and
+ destruction of code objects.
+ (Contributed by Itamar Ostricher in :gh:`91054`.)
* Add :c:func:`PyFrame_GetVar` and :c:func:`PyFrame_GetVarString` functions to
get a frame variable by its name.
@@ -831,6 +851,11 @@ Porting to Python 3.12
:class:`bytes` type is accepted for bytes strings.
(Contributed by Victor Stinner in :gh:`98393`.)
+* The :c:macro:`Py_CLEAR`, :c:macro:`Py_SETREF` and :c:macro:`Py_XSETREF`
+ macros now only evaluate their arguments once. If an argument has side
+ effects, these side effects are no longer duplicated.
+ (Contributed by Victor Stinner in :gh:`98724`.)
+
Deprecated
----------
diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst
index 6037db9f954d26..1b1455b72b9291 100644
--- a/Doc/whatsnew/3.2.rst
+++ b/Doc/whatsnew/3.2.rst
@@ -468,6 +468,7 @@ Some smaller changes made to the core Python language are:
>>> class LowerCasedDict(dict):
... def __getitem__(self, key):
... return dict.__getitem__(self, key.lower())
+ ...
>>> lcd = LowerCasedDict(part='widgets', quantity=10)
>>> 'There are {QUANTITY} {Part} in stock'.format_map(lcd)
'There are 10 widgets in stock'
@@ -475,6 +476,7 @@ Some smaller changes made to the core Python language are:
>>> class PlaceholderDict(dict):
... def __missing__(self, key):
... return '<{}>'.format(key)
+ ...
>>> 'Hello {name}, welcome to {location}'.format_map(PlaceholderDict())
'Hello , welcome to '
@@ -1886,6 +1888,7 @@ inspect
>>> from inspect import getgeneratorstate
>>> def gen():
... yield 'demo'
+ ...
>>> g = gen()
>>> getgeneratorstate(g)
'GEN_CREATED'
diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst
index 96a632577b2c56..9e8d42469b019c 100644
--- a/Doc/whatsnew/3.3.rst
+++ b/Doc/whatsnew/3.3.rst
@@ -560,6 +560,7 @@ Example with (non-bound) methods::
>>> class C:
... def meth(self):
... pass
+ ...
>>> C.meth.__name__
'meth'
>>> C.meth.__qualname__
diff --git a/Include/cpython/code.h b/Include/cpython/code.h
index fd57e0035bc09a..6a13ff2dfd9fd1 100644
--- a/Include/cpython/code.h
+++ b/Include/cpython/code.h
@@ -16,21 +16,24 @@ extern "C" {
* 2**32 - 1, rather than INT_MAX.
*/
-typedef uint16_t _Py_CODEUNIT;
-
-#ifdef WORDS_BIGENDIAN
-# define _Py_OPCODE(word) ((word) >> 8)
-# define _Py_OPARG(word) ((word) & 255)
-# define _Py_MAKECODEUNIT(opcode, oparg) (((opcode)<<8)|(oparg))
-#else
-# define _Py_OPCODE(word) ((word) & 255)
-# define _Py_OPARG(word) ((word) >> 8)
-# define _Py_MAKECODEUNIT(opcode, oparg) ((opcode)|((oparg)<<8))
-#endif
+typedef union {
+ uint16_t cache;
+ struct {
+ uint8_t opcode;
+ uint8_t oparg;
+ };
+} _Py_CODEUNIT;
+
+#define _Py_OPCODE(word) ((word).opcode)
+#define _Py_OPARG(word) ((word).oparg)
+
+static inline void
+_py_set_opcode(_Py_CODEUNIT *word, uint8_t opcode)
+{
+ word->opcode = opcode;
+}
-// Use "unsigned char" instead of "uint8_t" here to avoid illegal aliasing:
-#define _Py_SET_OPCODE(word, opcode) \
- do { ((unsigned char *)&(word))[0] = (opcode); } while (0)
+#define _Py_SET_OPCODE(word, opcode) _py_set_opcode(&(word), opcode)
typedef struct {
PyObject *_co_code;
@@ -87,6 +90,7 @@ typedef struct {
int co_nplaincellvars; /* number of non-arg cell variables */ \
int co_ncellvars; /* total number of cell variables */ \
int co_nfreevars; /* number of free variables */ \
+ uint32_t co_version; /* version number */ \
\
PyObject *co_localsplusnames; /* tuple mapping offsets to names */ \
PyObject *co_localspluskinds; /* Bytes mapping to local kinds (one byte \
@@ -181,6 +185,41 @@ PyAPI_FUNC(int) PyCode_Addr2Line(PyCodeObject *, int);
PyAPI_FUNC(int) PyCode_Addr2Location(PyCodeObject *, int, int *, int *, int *, int *);
+typedef enum PyCodeEvent {
+ PY_CODE_EVENT_CREATE,
+ PY_CODE_EVENT_DESTROY
+} PyCodeEvent;
+
+
+/*
+ * A callback that is invoked for different events in a code object's lifecycle.
+ *
+ * The callback is invoked with a borrowed reference to co, after it is
+ * created and before it is destroyed.
+ *
+ * If the callback returns with an exception set, it must return -1. Otherwise
+ * it should return 0.
+ */
+typedef int (*PyCode_WatchCallback)(
+ PyCodeEvent event,
+ PyCodeObject* co);
+
+/*
+ * Register a per-interpreter callback that will be invoked for code object
+ * lifecycle events.
+ *
+ * Returns a handle that may be passed to PyCode_ClearWatcher on success,
+ * or -1 and sets an error if no more handles are available.
+ */
+PyAPI_FUNC(int) PyCode_AddWatcher(PyCode_WatchCallback callback);
+
+/*
+ * Clear the watcher associated with the watcher_id handle.
+ *
+ * Returns 0 on success or -1 if no watcher exists for the provided id.
+ */
+PyAPI_FUNC(int) PyCode_ClearWatcher(int watcher_id);
+
/* for internal use only */
struct _opaque {
int computed_line;
diff --git a/Include/cpython/object.h b/Include/cpython/object.h
index 3abfcb7d44f0fb..4263370861302b 100644
--- a/Include/cpython/object.h
+++ b/Include/cpython/object.h
@@ -305,38 +305,69 @@ _PyObject_GenericSetAttrWithDict(PyObject *, PyObject *,
PyAPI_FUNC(PyObject *) _PyObject_FunctionStr(PyObject *);
-/* Safely decref `op` and set `op` to `op2`.
+/* Safely decref `dst` and set `dst` to `src`.
*
* As in case of Py_CLEAR "the obvious" code can be deadly:
*
- * Py_DECREF(op);
- * op = op2;
+ * Py_DECREF(dst);
+ * dst = src;
*
* The safe way is:
*
- * Py_SETREF(op, op2);
+ * Py_SETREF(dst, src);
*
- * That arranges to set `op` to `op2` _before_ decref'ing, so that any code
- * triggered as a side-effect of `op` getting torn down no longer believes
- * `op` points to a valid object.
+ * That arranges to set `dst` to `src` _before_ decref'ing, so that any code
+ * triggered as a side-effect of `dst` getting torn down no longer believes
+ * `dst` points to a valid object.
*
- * Py_XSETREF is a variant of Py_SETREF that uses Py_XDECREF instead of
- * Py_DECREF.
+ * Temporary variables are used to evaluate macro arguments only once and so
+ * avoid the duplication of side effects. _Py_TYPEOF() or memcpy() is used to
+ * avoid a miscompilation caused by type punning. See Py_CLEAR() comment for
+ * implementation details about type punning.
+ *
+ * The memcpy() implementation does not emit a compiler warning if 'src' does
+ * not have the same type as 'dst': any pointer type is accepted for 'src'.
*/
-
-#define Py_SETREF(op, op2) \
- do { \
- PyObject *_py_tmp = _PyObject_CAST(op); \
- (op) = (op2); \
- Py_DECREF(_py_tmp); \
+#ifdef _Py_TYPEOF
+#define Py_SETREF(dst, src) \
+ do { \
+ _Py_TYPEOF(dst)* _tmp_dst_ptr = &(dst); \
+ _Py_TYPEOF(dst) _tmp_old_dst = (*_tmp_dst_ptr); \
+ *_tmp_dst_ptr = (src); \
+ Py_DECREF(_tmp_old_dst); \
} while (0)
+#else
+#define Py_SETREF(dst, src) \
+ do { \
+ PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \
+ PyObject *_tmp_old_dst = (*_tmp_dst_ptr); \
+ PyObject *_tmp_src = _PyObject_CAST(src); \
+ memcpy(_tmp_dst_ptr, &_tmp_src, sizeof(PyObject*)); \
+ Py_DECREF(_tmp_old_dst); \
+ } while (0)
+#endif
-#define Py_XSETREF(op, op2) \
- do { \
- PyObject *_py_tmp = _PyObject_CAST(op); \
- (op) = (op2); \
- Py_XDECREF(_py_tmp); \
+/* Py_XSETREF() is a variant of Py_SETREF() that uses Py_XDECREF() instead of
+ * Py_DECREF().
+ */
+#ifdef _Py_TYPEOF
+#define Py_XSETREF(dst, src) \
+ do { \
+ _Py_TYPEOF(dst)* _tmp_dst_ptr = &(dst); \
+ _Py_TYPEOF(dst) _tmp_old_dst = (*_tmp_dst_ptr); \
+ *_tmp_dst_ptr = (src); \
+ Py_XDECREF(_tmp_old_dst); \
+ } while (0)
+#else
+#define Py_XSETREF(dst, src) \
+ do { \
+ PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \
+ PyObject *_tmp_old_dst = (*_tmp_dst_ptr); \
+ PyObject *_tmp_src = _PyObject_CAST(src); \
+ memcpy(_tmp_dst_ptr, &_tmp_src, sizeof(PyObject*)); \
+ Py_XDECREF(_tmp_old_dst); \
} while (0)
+#endif
PyAPI_DATA(PyTypeObject) _PyNone_Type;
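
A minimal sketch of the pattern the rewritten macros serve, assuming an illustrative MyObject type with a single PyObject* field; the type and setter names are hypothetical, and only Py_SETREF()/Py_XSETREF() (plus the existing Py_NewRef()/Py_XNewRef() helpers) are real API.

    #include <Python.h>

    /* Illustrative only: swap in a new reference without leaving a window in
     * which the field still points at an object that is being torn down. */
    typedef struct {
        PyObject_HEAD
        PyObject *attr;   /* may be NULL */
    } MyObject;

    static int
    myobject_set_attr(MyObject *self, PyObject *value)
    {
        /* Py_XSETREF: 'attr' may be NULL; the old value is decref'ed only
         * after the field already points at the new one. */
        Py_XSETREF(self->attr, Py_XNewRef(value));
        return 0;
    }

    static void
    myobject_reset_attr(MyObject *self)
    {
        /* Py_SETREF requires the old value to be non-NULL (it uses Py_DECREF). */
        Py_SETREF(self->attr, Py_NewRef(Py_None));
    }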
diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h
index c51542bcc895cb..0117c23f518cdb 100644
--- a/Include/cpython/pystate.h
+++ b/Include/cpython/pystate.h
@@ -353,6 +353,9 @@ PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void);
// is necessary to pass safely between interpreters in the same process.
typedef struct _xid _PyCrossInterpreterData;
+typedef PyObject *(*xid_newobjectfunc)(_PyCrossInterpreterData *);
+typedef void (*xid_freefunc)(void *);
+
struct _xid {
// data is the cross-interpreter-safe derivation of a Python object
// (see _PyObject_GetCrossInterpreterData). It will be NULL if the
@@ -379,7 +382,7 @@ struct _xid {
// interpreter given the data. The resulting object (a new
// reference) will be equivalent to the original object. This field
// is required.
- PyObject *(*new_object)(_PyCrossInterpreterData *);
+ xid_newobjectfunc new_object;
// free is called when the data is released. If it is NULL then
// nothing will be done to free the data. For some types this is
// okay (e.g. bytes) and for those types this field should be set
@@ -389,18 +392,31 @@ struct _xid {
// leak. In that case, at the very least this field should be set
// to PyMem_RawFree (the default if not explicitly set to NULL).
// The call will happen with the original interpreter activated.
- void (*free)(void *);
+ xid_freefunc free;
};
+PyAPI_FUNC(void) _PyCrossInterpreterData_Init(
+ _PyCrossInterpreterData *data,
+ PyInterpreterState *interp, void *shared, PyObject *obj,
+ xid_newobjectfunc new_object);
+PyAPI_FUNC(int) _PyCrossInterpreterData_InitWithSize(
+ _PyCrossInterpreterData *,
+ PyInterpreterState *interp, const size_t, PyObject *,
+ xid_newobjectfunc);
+PyAPI_FUNC(void) _PyCrossInterpreterData_Clear(
+ PyInterpreterState *, _PyCrossInterpreterData *);
+
PyAPI_FUNC(int) _PyObject_GetCrossInterpreterData(PyObject *, _PyCrossInterpreterData *);
PyAPI_FUNC(PyObject *) _PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *);
-PyAPI_FUNC(void) _PyCrossInterpreterData_Release(_PyCrossInterpreterData *);
+PyAPI_FUNC(int) _PyCrossInterpreterData_Release(_PyCrossInterpreterData *);
PyAPI_FUNC(int) _PyObject_CheckCrossInterpreterData(PyObject *);
/* cross-interpreter data registry */
-typedef int (*crossinterpdatafunc)(PyObject *, _PyCrossInterpreterData *);
+typedef int (*crossinterpdatafunc)(PyThreadState *tstate, PyObject *,
+ _PyCrossInterpreterData *);
PyAPI_FUNC(int) _PyCrossInterpreterData_RegisterClass(PyTypeObject *, crossinterpdatafunc);
+PyAPI_FUNC(int) _PyCrossInterpreterData_UnregisterClass(PyTypeObject *);
PyAPI_FUNC(crossinterpdatafunc) _PyCrossInterpreterData_Lookup(PyObject *);
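
A hedged sketch of the new tstate-taking crossinterpdatafunc signature together with the _PyCrossInterpreterData_Init() helper declared above. This is private API; the "myint" type, the value-in-pointer encoding, and passing NULL for the original object are assumptions made purely for illustration.

    #include <Python.h>

    /* Illustrative sketch for a hypothetical shareable integer-like type. */

    static PyObject *
    restore_myint(_PyCrossInterpreterData *data)
    {
        /* Called in the importing interpreter: rebuild an equivalent object
         * from the C-level data stashed by the exporting interpreter. */
        return PyLong_FromSsize_t((Py_ssize_t)data->data);
    }

    static int
    share_myint(PyThreadState *tstate, PyObject *obj,
                _PyCrossInterpreterData *data)
    {
        Py_ssize_t value = PyLong_AsSsize_t(obj);
        if (value == -1 && PyErr_Occurred()) {
            return -1;
        }
        /* Store the value directly in the pointer slot; nothing to free,
         * so the free callback can stay NULL (assumed acceptable here). */
        _PyCrossInterpreterData_Init(data, tstate->interp, (void *)value,
                                     NULL, restore_myint);
        return 0;
    }

    /* Registration/unregistration, e.g. at module init/fini: */
    static int
    register_myint(PyTypeObject *type)
    {
        return _PyCrossInterpreterData_RegisterClass(type, share_myint);
    }

    static int
    unregister_myint(PyTypeObject *type)
    {
        return _PyCrossInterpreterData_UnregisterClass(type);
    }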
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h
index a75336f590e81b..75a74ffa2f9dff 100644
--- a/Include/cpython/unicodeobject.h
+++ b/Include/cpython/unicodeobject.h
@@ -231,7 +231,7 @@ enum PyUnicode_Kind {
// new compiler warnings on "kind < PyUnicode_KIND(str)" (compare signed and
// unsigned numbers) where kind type is an int or on
// "unsigned int kind = PyUnicode_KIND(str)" (cast signed to unsigned).
-#define PyUnicode_KIND(op) (_PyASCIIObject_CAST(op)->state.kind)
+#define PyUnicode_KIND(op) _Py_RVALUE(_PyASCIIObject_CAST(op)->state.kind)
/* Return a void pointer to the raw unicode buffer. */
static inline void* _PyUnicode_COMPACT_DATA(PyObject *op) {
diff --git a/Include/exports.h b/Include/exports.h
index fc1a5c5ead6276..59373c39ff757c 100644
--- a/Include/exports.h
+++ b/Include/exports.h
@@ -2,9 +2,15 @@
#define Py_EXPORTS_H
#if defined(_WIN32) || defined(__CYGWIN__)
- #define Py_IMPORTED_SYMBOL __declspec(dllimport)
- #define Py_EXPORTED_SYMBOL __declspec(dllexport)
- #define Py_LOCAL_SYMBOL
+ #if defined(Py_ENABLE_SHARED)
+ #define Py_IMPORTED_SYMBOL __declspec(dllimport)
+ #define Py_EXPORTED_SYMBOL __declspec(dllexport)
+ #define Py_LOCAL_SYMBOL
+ #else
+ #define Py_IMPORTED_SYMBOL
+ #define Py_EXPORTED_SYMBOL
+ #define Py_LOCAL_SYMBOL
+ #endif
#else
/*
* If we only ever used gcc >= 5, we could use __has_attribute(visibility)
diff --git a/Include/internal/pycore_ceval_state.h b/Include/internal/pycore_ceval_state.h
new file mode 100644
index 00000000000000..9ba42eb03b2676
--- /dev/null
+++ b/Include/internal/pycore_ceval_state.h
@@ -0,0 +1,100 @@
+#ifndef Py_INTERNAL_CEVAL_STATE_H
+#define Py_INTERNAL_CEVAL_STATE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+
+#include "pycore_atomic.h" /* _Py_atomic_address */
+#include "pycore_gil.h" // struct _gil_runtime_state
+
+
+typedef enum {
+ PERF_STATUS_FAILED = -1, // Perf trampoline is in an invalid state
+ PERF_STATUS_NO_INIT = 0, // Perf trampoline is not initialized
+ PERF_STATUS_OK = 1, // Perf trampoline is ready to be executed
+} perf_status_t;
+
+
+#ifdef PY_HAVE_PERF_TRAMPOLINE
+struct code_arena_st;
+
+struct trampoline_api_st {
+ void* (*init_state)(void);
+ void (*write_state)(void* state, const void *code_addr,
+ unsigned int code_size, PyCodeObject* code);
+ int (*free_state)(void* state);
+ void *state;
+};
+#endif
+
+struct _ceval_runtime_state {
+ struct {
+#ifdef PY_HAVE_PERF_TRAMPOLINE
+ perf_status_t status;
+ Py_ssize_t extra_code_index;
+ struct code_arena_st *code_arena;
+ struct trampoline_api_st trampoline_api;
+ FILE *map_file;
+#else
+ int _not_used;
+#endif
+ } perf;
+ /* Request for checking signals. It is shared by all interpreters (see
+ bpo-40513). Any thread of any interpreter can receive a signal, but only
+ the main thread of the main interpreter can handle signals: see
+ _Py_ThreadCanHandleSignals(). */
+ _Py_atomic_int signals_pending;
+ struct _gil_runtime_state gil;
+};
+
+#ifdef PY_HAVE_PERF_TRAMPOLINE
+# define _PyEval_RUNTIME_PERF_INIT \
+ { \
+ .status = PERF_STATUS_NO_INIT, \
+ .extra_code_index = -1, \
+ }
+#else
+# define _PyEval_RUNTIME_PERF_INIT {0}
+#endif
+
+
+struct _pending_calls {
+ int busy;
+ PyThread_type_lock lock;
+ /* Request for running pending calls. */
+ _Py_atomic_int calls_to_do;
+ /* Request for looking at the `async_exc` field of the current
+ thread state.
+ Guarded by the GIL. */
+ int async_exc;
+#define NPENDINGCALLS 32
+ struct {
+ int (*func)(void *);
+ void *arg;
+ } calls[NPENDINGCALLS];
+ int first;
+ int last;
+};
+
+struct _ceval_state {
+ int recursion_limit;
+ /* This single variable consolidates all requests to break out of
+ the fast path in the eval loop. */
+ _Py_atomic_int eval_breaker;
+ /* Request for dropping the GIL */
+ _Py_atomic_int gil_drop_request;
+ /* The GC is ready to be executed */
+ _Py_atomic_int gc_scheduled;
+ struct _pending_calls pending;
+};
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_CEVAL_STATE_H */
diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h
index 80c1bfb6c9afa2..9e59fc98bf3d57 100644
--- a/Include/internal/pycore_code.h
+++ b/Include/internal/pycore_code.h
@@ -4,6 +4,8 @@
extern "C" {
#endif
+#define CODE_MAX_WATCHERS 8
+
/* PEP 659
* Specialization and quickening structs and helper functions
*/
@@ -16,53 +18,53 @@ extern "C" {
#define CACHE_ENTRIES(cache) (sizeof(cache)/sizeof(_Py_CODEUNIT))
typedef struct {
- _Py_CODEUNIT counter;
- _Py_CODEUNIT index;
- _Py_CODEUNIT module_keys_version[2];
- _Py_CODEUNIT builtin_keys_version;
+ uint16_t counter;
+ uint16_t index;
+ uint16_t module_keys_version[2];
+ uint16_t builtin_keys_version;
} _PyLoadGlobalCache;
#define INLINE_CACHE_ENTRIES_LOAD_GLOBAL CACHE_ENTRIES(_PyLoadGlobalCache)
typedef struct {
- _Py_CODEUNIT counter;
+ uint16_t counter;
} _PyBinaryOpCache;
#define INLINE_CACHE_ENTRIES_BINARY_OP CACHE_ENTRIES(_PyBinaryOpCache)
typedef struct {
- _Py_CODEUNIT counter;
+ uint16_t counter;
} _PyUnpackSequenceCache;
#define INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE \
CACHE_ENTRIES(_PyUnpackSequenceCache)
typedef struct {
- _Py_CODEUNIT counter;
- _Py_CODEUNIT mask;
+ uint16_t counter;
+ uint16_t mask;
} _PyCompareOpCache;
#define INLINE_CACHE_ENTRIES_COMPARE_OP CACHE_ENTRIES(_PyCompareOpCache)
typedef struct {
- _Py_CODEUNIT counter;
- _Py_CODEUNIT type_version[2];
- _Py_CODEUNIT func_version;
+ uint16_t counter;
+ uint16_t type_version[2];
+ uint16_t func_version;
} _PyBinarySubscrCache;
#define INLINE_CACHE_ENTRIES_BINARY_SUBSCR CACHE_ENTRIES(_PyBinarySubscrCache)
typedef struct {
- _Py_CODEUNIT counter;
- _Py_CODEUNIT version[2];
- _Py_CODEUNIT index;
+ uint16_t counter;
+ uint16_t version[2];
+ uint16_t index;
} _PyAttrCache;
typedef struct {
- _Py_CODEUNIT counter;
- _Py_CODEUNIT type_version[2];
- _Py_CODEUNIT keys_version[2];
- _Py_CODEUNIT descr[4];
+ uint16_t counter;
+ uint16_t type_version[2];
+ uint16_t keys_version[2];
+ uint16_t descr[4];
} _PyLoadMethodCache;
@@ -72,21 +74,21 @@ typedef struct {
#define INLINE_CACHE_ENTRIES_STORE_ATTR CACHE_ENTRIES(_PyAttrCache)
typedef struct {
- _Py_CODEUNIT counter;
- _Py_CODEUNIT func_version[2];
- _Py_CODEUNIT min_args;
+ uint16_t counter;
+ uint16_t func_version[2];
+ uint16_t min_args;
} _PyCallCache;
#define INLINE_CACHE_ENTRIES_CALL CACHE_ENTRIES(_PyCallCache)
typedef struct {
- _Py_CODEUNIT counter;
+ uint16_t counter;
} _PyStoreSubscrCache;
#define INLINE_CACHE_ENTRIES_STORE_SUBSCR CACHE_ENTRIES(_PyStoreSubscrCache)
typedef struct {
- _Py_CODEUNIT counter;
+ uint16_t counter;
} _PyForIterCache;
#define INLINE_CACHE_ENTRIES_FOR_ITER CACHE_ENTRIES(_PyForIterCache)
@@ -407,7 +409,7 @@ write_location_entry_start(uint8_t *ptr, int code, int length)
static inline uint16_t
adaptive_counter_bits(int value, int backoff) {
return (value << ADAPTIVE_BACKOFF_BITS) |
+ (backoff & ((1 << ADAPTIVE_BACKOFF_BITS) - 1));
diff --git a/Include/internal/pycore_dtoa.h b/Include/internal/pycore_dtoa.h
--- a/Include/internal/pycore_dtoa.h
+++ b/Include/internal/pycore_dtoa.h
+ /* p5s is a linked list of powers of 5 of the form 5**(2**i), i >= 2 */
+ // XXX This should be freed during runtime fini.
+ struct Bigint *p5s;
+ struct Bigint *freelist[Bigint_Kmax+1];
+ double preallocated[Bigint_PREALLOC_SIZE];
+ double *preallocated_next;
};
#define _dtoa_runtime_state_INIT(runtime) \
{ \
diff --git a/Include/internal/pycore_faulthandler.h b/Include/internal/pycore_faulthandler.h
new file mode 100644
index 00000000000000..e6aec7745a6479
--- /dev/null
+++ b/Include/internal/pycore_faulthandler.h
@@ -0,0 +1,99 @@
+#ifndef Py_INTERNAL_FAULTHANDLER_H
+#define Py_INTERNAL_FAULTHANDLER_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#ifdef HAVE_SIGACTION
+# include <signal.h>
+#endif
+
+
+#ifndef MS_WINDOWS
+ /* register() is useless on Windows, because only SIGSEGV, SIGABRT and
+ SIGILL can be handled by the process, and these signals can only be used
+ with enable(), not using register() */
+# define FAULTHANDLER_USER
+#endif
+
+
+#ifdef HAVE_SIGACTION
+/* Using an alternative stack requires sigaltstack()
+ and sigaction() SA_ONSTACK */
+# ifdef HAVE_SIGALTSTACK
+# define FAULTHANDLER_USE_ALT_STACK
+# endif
+typedef struct sigaction _Py_sighandler_t;
+#else
+typedef PyOS_sighandler_t _Py_sighandler_t;
+#endif // HAVE_SIGACTION
+
+
+#ifdef FAULTHANDLER_USER
+struct faulthandler_user_signal {
+ int enabled;
+ PyObject *file;
+ int fd;
+ int all_threads;
+ int chain;
+ _Py_sighandler_t previous;
+ PyInterpreterState *interp;
+};
+#endif /* FAULTHANDLER_USER */
+
+
+struct _faulthandler_runtime_state {
+ struct {
+ int enabled;
+ PyObject *file;
+ int fd;
+ int all_threads;
+ PyInterpreterState *interp;
+#ifdef MS_WINDOWS
+ void *exc_handler;
+#endif
+ } fatal_error;
+
+ struct {
+ PyObject *file;
+ int fd;
+ PY_TIMEOUT_T timeout_us; /* timeout in microseconds */
+ int repeat;
+ PyInterpreterState *interp;
+ int exit;
+ char *header;
+ size_t header_len;
+ /* The main thread always holds this lock. It is only released when
+ faulthandler_thread() is interrupted before this thread exits, or at
+ Python exit. */
+ PyThread_type_lock cancel_event;
+ /* released by child thread when joined */
+ PyThread_type_lock running;
+ } thread;
+
+#ifdef FAULTHANDLER_USER
+ struct faulthandler_user_signal *user_signals;
+#endif
+
+#ifdef FAULTHANDLER_USE_ALT_STACK
+ stack_t stack;
+ stack_t old_stack;
+#endif
+};
+
+#define _faulthandler_runtime_state_INIT \
+ { \
+ .fatal_error = { \
+ .fd = -1, \
+ }, \
+ }
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_FAULTHANDLER_H */
diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h
index 9951fa9951e67a..6aba2f19ebde4a 100644
--- a/Include/internal/pycore_global_objects_fini_generated.h
+++ b/Include/internal/pycore_global_objects_fini_generated.h
@@ -1051,6 +1051,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(node_offset));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ns));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(nstype));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(nt));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(null));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(number));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(obj));
@@ -1089,6 +1090,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos1));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos2));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(posix));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(print_file_and_line));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(priority));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(progress));
diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h
index 12144b02f45574..acb9a4fbb92dce 100644
--- a/Include/internal/pycore_global_strings.h
+++ b/Include/internal/pycore_global_strings.h
@@ -537,6 +537,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(node_offset)
STRUCT_FOR_ID(ns)
STRUCT_FOR_ID(nstype)
+ STRUCT_FOR_ID(nt)
STRUCT_FOR_ID(null)
STRUCT_FOR_ID(number)
STRUCT_FOR_ID(obj)
@@ -575,6 +576,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(pos)
STRUCT_FOR_ID(pos1)
STRUCT_FOR_ID(pos2)
+ STRUCT_FOR_ID(posix)
STRUCT_FOR_ID(print_file_and_line)
STRUCT_FOR_ID(priority)
STRUCT_FOR_ID(progress)
diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h
index 532b28499080f2..ffda1351952d2a 100644
--- a/Include/internal/pycore_interp.h
+++ b/Include/internal/pycore_interp.h
@@ -12,6 +12,7 @@ extern "C" {
#include "pycore_atomic.h" // _Py_atomic_address
#include "pycore_ast_state.h" // struct ast_state
+#include "pycore_ceval_state.h" // struct _ceval_state
#include "pycore_code.h" // struct callable_cache
#include "pycore_context.h" // struct _Py_context_state
#include "pycore_dict_state.h" // struct _Py_dict_state
@@ -28,37 +29,6 @@ extern "C" {
#include "pycore_warnings.h" // struct _warnings_runtime_state
-struct _pending_calls {
- int busy;
- PyThread_type_lock lock;
- /* Request for running pending calls. */
- _Py_atomic_int calls_to_do;
- /* Request for looking at the `async_exc` field of the current
- thread state.
- Guarded by the GIL. */
- int async_exc;
-#define NPENDINGCALLS 32
- struct {
- int (*func)(void *);
- void *arg;
- } calls[NPENDINGCALLS];
- int first;
- int last;
-};
-
-struct _ceval_state {
- int recursion_limit;
- /* This single variable consolidates all requests to break out of
- the fast path in the eval loop. */
- _Py_atomic_int eval_breaker;
- /* Request for dropping the GIL */
- _Py_atomic_int gil_drop_request;
- /* The GC is ready to be executed */
- _Py_atomic_int gc_scheduled;
- struct _pending_calls pending;
-};
-
-
// atexit state
typedef struct {
PyObject *func;
@@ -191,6 +161,9 @@ struct _is {
PyObject *audit_hooks;
PyType_WatchCallback type_watchers[TYPE_MAX_WATCHERS];
+ PyCode_WatchCallback code_watchers[CODE_MAX_WATCHERS];
+ // One bit is set for each non-NULL entry in code_watchers
+ uint8_t active_code_watchers;
struct _Py_unicode_state unicode;
struct _Py_float_state float_state;
@@ -246,9 +219,10 @@ extern void _PyInterpreterState_Clear(PyThreadState *tstate);
struct _xidregitem;
struct _xidregitem {
- PyTypeObject *cls;
- crossinterpdatafunc getdata;
+ struct _xidregitem *prev;
struct _xidregitem *next;
+ PyObject *cls; // weakref to a PyTypeObject
+ crossinterpdatafunc getdata;
};
PyAPI_FUNC(PyInterpreterState*) _PyInterpreterState_LookUpID(int64_t);
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index 33c8c0b75ea742..8796dfe2f6b8cf 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -373,7 +373,7 @@ PyAPI_FUNC(PyObject *) _PyObject_LookupSpecial(PyObject *, PyObject *);
* match.
*
* Third party code unintentionally rely on problematic fpcasts. The call
- * trampoline mitigates common occurences of bad fpcasts on Emscripten.
+ * trampoline mitigates common occurrences of bad fpcasts on Emscripten.
*/
#if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE)
#define _PyCFunction_TrampolineCall(meth, self, args) \
diff --git a/Include/internal/pycore_obmalloc.h b/Include/internal/pycore_obmalloc.h
index 93349d89c6ab52..a5c7f4528f9126 100644
--- a/Include/internal/pycore_obmalloc.h
+++ b/Include/internal/pycore_obmalloc.h
@@ -658,6 +658,7 @@ struct _obmalloc_usage {
struct _obmalloc_state {
+ int dump_debug_stats;
struct _obmalloc_pools pools;
struct _obmalloc_mgmt mgmt;
struct _obmalloc_usage usage;
diff --git a/Include/internal/pycore_obmalloc_init.h b/Include/internal/pycore_obmalloc_init.h
index c0fb057d06652b..c9f197e72de9f5 100644
--- a/Include/internal/pycore_obmalloc_init.h
+++ b/Include/internal/pycore_obmalloc_init.h
@@ -56,6 +56,7 @@ extern "C" {
#define _obmalloc_state_INIT(obmalloc) \
{ \
+ .dump_debug_stats = -1, \
.pools = { \
.used = _obmalloc_pools_INIT(obmalloc.pools), \
}, \
diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h
index 0d31ca166a7d2e..da8a272f2fa2d0 100644
--- a/Include/internal/pycore_opcode.h
+++ b/Include/internal/pycore_opcode.h
@@ -125,6 +125,7 @@ const uint8_t _PyOpcode_Deopt[256] = {
[FOR_ITER_GEN] = FOR_ITER,
[FOR_ITER_LIST] = FOR_ITER,
[FOR_ITER_RANGE] = FOR_ITER,
+ [FOR_ITER_TUPLE] = FOR_ITER,
[GET_AITER] = GET_AITER,
[GET_ANEXT] = GET_ANEXT,
[GET_AWAITABLE] = GET_AWAITABLE,
@@ -293,31 +294,31 @@ static const char *const _PyOpcode_OpName[263] = {
[FOR_ITER_LIST] = "FOR_ITER_LIST",
[STORE_SUBSCR] = "STORE_SUBSCR",
[DELETE_SUBSCR] = "DELETE_SUBSCR",
- [FOR_ITER_RANGE] = "FOR_ITER_RANGE",
+ [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE",
[STOPITERATION_ERROR] = "STOPITERATION_ERROR",
+ [FOR_ITER_RANGE] = "FOR_ITER_RANGE",
[FOR_ITER_GEN] = "FOR_ITER_GEN",
[LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS",
[LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN",
- [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE",
[GET_ITER] = "GET_ITER",
[GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER",
[PRINT_EXPR] = "PRINT_EXPR",
[LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS",
+ [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE",
[LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE",
- [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY",
[LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR",
[RETURN_GENERATOR] = "RETURN_GENERATOR",
+ [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY",
[LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT",
[LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT",
[LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT",
[LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT",
[LOAD_ATTR_METHOD_WITH_DICT] = "LOAD_ATTR_METHOD_WITH_DICT",
- [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES",
[LIST_TO_TUPLE] = "LIST_TO_TUPLE",
[RETURN_VALUE] = "RETURN_VALUE",
[IMPORT_STAR] = "IMPORT_STAR",
[SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS",
- [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST",
+ [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES",
[ASYNC_GEN_WRAP] = "ASYNC_GEN_WRAP",
[PREP_RERAISE_STAR] = "PREP_RERAISE_STAR",
[POP_EXCEPT] = "POP_EXCEPT",
@@ -344,7 +345,7 @@ static const char *const _PyOpcode_OpName[263] = {
[JUMP_FORWARD] = "JUMP_FORWARD",
[JUMP_IF_FALSE_OR_POP] = "JUMP_IF_FALSE_OR_POP",
[JUMP_IF_TRUE_OR_POP] = "JUMP_IF_TRUE_OR_POP",
- [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST",
+ [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST",
[POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE",
[POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE",
[LOAD_GLOBAL] = "LOAD_GLOBAL",
@@ -352,7 +353,7 @@ static const char *const _PyOpcode_OpName[263] = {
[CONTAINS_OP] = "CONTAINS_OP",
[RERAISE] = "RERAISE",
[COPY] = "COPY",
- [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST",
+ [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST",
[BINARY_OP] = "BINARY_OP",
[SEND] = "SEND",
[LOAD_FAST] = "LOAD_FAST",
@@ -372,9 +373,9 @@ static const char *const _PyOpcode_OpName[263] = {
[STORE_DEREF] = "STORE_DEREF",
[DELETE_DEREF] = "DELETE_DEREF",
[JUMP_BACKWARD] = "JUMP_BACKWARD",
- [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN",
+ [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST",
[CALL_FUNCTION_EX] = "CALL_FUNCTION_EX",
- [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE",
+ [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN",
[EXTENDED_ARG] = "EXTENDED_ARG",
[LIST_APPEND] = "LIST_APPEND",
[SET_ADD] = "SET_ADD",
@@ -384,24 +385,24 @@ static const char *const _PyOpcode_OpName[263] = {
[YIELD_VALUE] = "YIELD_VALUE",
[RESUME] = "RESUME",
[MATCH_CLASS] = "MATCH_CLASS",
+ [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE",
[STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE",
- [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT",
[FORMAT_VALUE] = "FORMAT_VALUE",
[BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP",
[BUILD_STRING] = "BUILD_STRING",
+ [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT",
[STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT",
[STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST",
[STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST",
- [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[LIST_EXTEND] = "LIST_EXTEND",
[SET_UPDATE] = "SET_UPDATE",
[DICT_MERGE] = "DICT_MERGE",
[DICT_UPDATE] = "DICT_UPDATE",
+ [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT",
[UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST",
[UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE",
[UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE",
- [170] = "<170>",
[CALL] = "CALL",
[KW_NAMES] = "KW_NAMES",
[173] = "<173>",
@@ -498,7 +499,6 @@ static const char *const _PyOpcode_OpName[263] = {
#endif
#define EXTRA_CASES \
- case 170: \
case 173: \
case 174: \
case 175: \
diff --git a/Include/internal/pycore_parser.h b/Include/internal/pycore_parser.h
index e2de24e2ca9734..2d2b56bd824cb4 100644
--- a/Include/internal/pycore_parser.h
+++ b/Include/internal/pycore_parser.h
@@ -8,12 +8,31 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
+
+#include "pycore_pyarena.h" // PyArena
+
+
+#ifdef Py_DEBUG
+#define _PYPEGEN_NSTATISTICS 2000
+#endif
+
+struct _parser_runtime_state {
+#ifdef Py_DEBUG
+ long memo_statistics[_PYPEGEN_NSTATISTICS];
+#else
+ int _not_used;
+#endif
+};
+
+
+
extern struct _mod* _PyParser_ASTFromString(
const char *str,
PyObject* filename,
int mode,
PyCompilerFlags *flags,
PyArena *arena);
+
extern struct _mod* _PyParser_ASTFromFile(
FILE *fp,
PyObject *filename_ob,
@@ -25,6 +44,7 @@ extern struct _mod* _PyParser_ASTFromFile(
int *errcode,
PyArena *arena);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h
index 4c0ffa7a9b1ab7..370e4cbd59f976 100644
--- a/Include/internal/pycore_pylifecycle.h
+++ b/Include/internal/pycore_pylifecycle.h
@@ -44,6 +44,7 @@ extern void _PySys_Fini(PyInterpreterState *interp);
extern int _PyBuiltins_AddExceptions(PyObject * bltinmod);
extern PyStatus _Py_HashRandomization_Init(const PyConfig *);
+extern PyStatus _PyTime_Init(void);
extern PyStatus _PyImportZip_Init(PyThreadState *tstate);
extern PyStatus _PyGC_Init(PyInterpreterState *interp);
extern PyStatus _PyAtExit_Init(PyInterpreterState *interp);
diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h
index 5749af7465f6f0..4cc953d8d779c9 100644
--- a/Include/internal/pycore_pymem.h
+++ b/Include/internal/pycore_pymem.h
@@ -90,28 +90,6 @@ PyAPI_FUNC(int) _PyMem_GetAllocatorName(
PYMEM_ALLOCATOR_NOT_SET does nothing. */
PyAPI_FUNC(int) _PyMem_SetupAllocators(PyMemAllocatorName allocator);
-struct _PyTraceMalloc_Config {
- /* Module initialized?
- Variable protected by the GIL */
- enum {
- TRACEMALLOC_NOT_INITIALIZED,
- TRACEMALLOC_INITIALIZED,
- TRACEMALLOC_FINALIZED
- } initialized;
-
- /* Is tracemalloc tracing memory allocations?
- Variable protected by the GIL */
- int tracing;
-
- /* limit of the number of frames in a traceback, 1 by default.
- Variable protected by the GIL. */
- int max_nframe;
-};
-
-#define _PyTraceMalloc_Config_INIT \
- {.initialized = TRACEMALLOC_NOT_INITIALIZED, \
- .tracing = 0, \
- .max_nframe = 1}
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_pythread.h b/Include/internal/pycore_pythread.h
new file mode 100644
index 00000000000000..f53921494c158f
--- /dev/null
+++ b/Include/internal/pycore_pythread.h
@@ -0,0 +1,81 @@
+#ifndef Py_INTERNAL_PYTHREAD_H
+#define Py_INTERNAL_PYTHREAD_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+
+#ifndef _POSIX_THREADS
+/* This means pthreads are not implemented in libc headers, hence the macro
+ not present in unistd.h. But they still can be implemented as an external
+ library (e.g. gnu pth in pthread emulation) */
+# ifdef HAVE_PTHREAD_H
+# include <pthread.h> /* _POSIX_THREADS */
+# endif
+# ifndef _POSIX_THREADS
+/* Check if we're running on HP-UX and _SC_THREADS is defined. If so, then
+ enough of the Posix threads package is implemented to support python
+ threads.
+
+ This is valid for HP-UX 11.23 running on an ia64 system. If needed, add
+ a check of __ia64 to verify that we're running on an ia64 system instead
+ of a pa-risc system.
+*/
+# ifdef __hpux
+# ifdef _SC_THREADS
+# define _POSIX_THREADS
+# endif
+# endif
+# endif /* _POSIX_THREADS */
+#endif /* _POSIX_THREADS */
+
+#if defined(_POSIX_THREADS) || defined(HAVE_PTHREAD_STUBS)
+# define _USE_PTHREADS
+#endif
+
+#if defined(_USE_PTHREADS) && defined(HAVE_PTHREAD_CONDATTR_SETCLOCK) && defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_MONOTONIC)
+// monotonic is supported statically. It doesn't mean it works at runtime.
+# define CONDATTR_MONOTONIC
+#endif
+
+
+#if defined(HAVE_PTHREAD_STUBS)
+// pthread_key
+struct py_stub_tls_entry {
+ bool in_use;
+ void *value;
+};
+#endif
+
+struct _pythread_runtime_state {
+ int initialized;
+
+#ifdef _USE_PTHREADS
+ // This matches when thread_pthread.h is used.
+ struct {
+ /* NULL when pthread_condattr_setclock(CLOCK_MONOTONIC) is not supported. */
+ pthread_condattr_t *ptr;
+# ifdef CONDATTR_MONOTONIC
+ /* The value to which condattr_monotonic is set. */
+ pthread_condattr_t val;
+# endif
+ } _condattr_monotonic;
+
+#endif // _USE_PTHREADS
+
+#if defined(HAVE_PTHREAD_STUBS)
+ struct {
+ struct py_stub_tls_entry tls_entries[PTHREAD_KEYS_MAX];
+ } stubs;
+#endif
+};
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_PYTHREAD_H */
diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h
index c1829cb1bdadeb..92ed45956c99b3 100644
--- a/Include/internal/pycore_runtime.h
+++ b/Include/internal/pycore_runtime.h
@@ -9,17 +9,23 @@ extern "C" {
#endif
#include "pycore_atomic.h" /* _Py_atomic_address */
+#include "pycore_ceval_state.h" // struct _ceval_runtime_state
#include "pycore_dict_state.h" // struct _Py_dict_runtime_state
#include "pycore_dtoa.h" // struct _dtoa_runtime_state
#include "pycore_floatobject.h" // struct _Py_float_runtime_state
+#include "pycore_faulthandler.h" // struct _faulthandler_runtime_state
#include "pycore_function.h" // struct _func_runtime_state
-#include "pycore_gil.h" // struct _gil_runtime_state
#include "pycore_global_objects.h" // struct _Py_global_objects
#include "pycore_import.h" // struct _import_runtime_state
#include "pycore_interp.h" // PyInterpreterState
+#include "pycore_parser.h" // struct _parser_runtime_state
#include "pycore_pymem.h" // struct _pymem_allocators
#include "pycore_pyhash.h" // struct pyhash_runtime_state
+#include "pycore_pythread.h" // struct _pythread_runtime_state
#include "pycore_obmalloc.h" // struct obmalloc_state
+#include "pycore_signal.h" // struct _signals_runtime_state
+#include "pycore_time.h" // struct _time_runtime_state
+#include "pycore_tracemalloc.h" // struct _tracemalloc_runtime_state
#include "pycore_unicodeobject.h" // struct _Py_unicode_runtime_ids
struct _getargs_runtime_state {
@@ -29,15 +35,6 @@ struct _getargs_runtime_state {
/* ceval state */
-struct _ceval_runtime_state {
- /* Request for checking signals. It is shared by all interpreters (see
- bpo-40513). Any thread of any interpreter can receive a signal, but only
- the main thread of the main interpreter can handle signals: see
- _Py_ThreadCanHandleSignals(). */
- _Py_atomic_int signals_pending;
- struct _gil_runtime_state gil;
-};
-
/* GIL state */
struct _gilstate_runtime_state {
@@ -97,11 +94,9 @@ typedef struct pyruntimestate {
struct _pymem_allocators allocators;
struct _obmalloc_state obmalloc;
struct pyhash_runtime_state pyhash_state;
- struct {
- /* True if the main interpreter thread exited due to an unhandled
- * KeyboardInterrupt exception, suggesting the user pressed ^C. */
- int unhandled_keyboard_interrupt;
- } signals;
+ struct _time_runtime_state time;
+ struct _pythread_runtime_state threads;
+ struct _signals_runtime_state signals;
struct pyinterpreters {
PyThread_type_lock mutex;
@@ -129,6 +124,10 @@ typedef struct pyruntimestate {
unsigned long main_thread;
+ PyWideStringList orig_argv;
+
+ struct _parser_runtime_state parser;
+
#define NEXITFUNCS 32
void (*exitfuncs[NEXITFUNCS])(void);
int nexitfuncs;
@@ -137,11 +136,10 @@ typedef struct pyruntimestate {
struct _ceval_runtime_state ceval;
struct _gilstate_runtime_state gilstate;
struct _getargs_runtime_state getargs;
- struct {
- struct _PyTraceMalloc_Config config;
- } tracemalloc;
struct _dtoa_runtime_state dtoa;
struct _fileutils_state fileutils;
+ struct _faulthandler_runtime_state faulthandler;
+ struct _tracemalloc_runtime_state tracemalloc;
PyPreConfig preconfig;
diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h
index ab53876e355fd8..1431096e2d24ba 100644
--- a/Include/internal/pycore_runtime_init.h
+++ b/Include/internal/pycore_runtime_init.h
@@ -26,6 +26,7 @@ extern "C" {
}, \
.obmalloc = _obmalloc_state_INIT(runtime.obmalloc), \
.pyhash_state = pyhash_state_INIT, \
+ .signals = _signals_RUNTIME_INIT, \
.interpreters = { \
/* This prevents interpreters from getting created \
until _PyInterpreterState_Enable() is called. */ \
@@ -41,19 +42,21 @@ extern "C" {
.header = 1, \
}, \
}, \
+ .ceval = { \
+ .perf = _PyEval_RUNTIME_PERF_INIT, \
+ }, \
.gilstate = { \
.check_enabled = 1, \
/* A TSS key must be initialized with Py_tss_NEEDS_INIT \
in accordance with the specification. */ \
.autoTSSkey = Py_tss_NEEDS_INIT, \
}, \
- .tracemalloc = { \
- .config = _PyTraceMalloc_Config_INIT, \
- }, \
.dtoa = _dtoa_runtime_state_INIT(runtime), \
.fileutils = { \
.force_ascii = -1, \
}, \
+ .faulthandler = _faulthandler_runtime_state_INIT, \
+ .tracemalloc = _tracemalloc_runtime_state_INIT, \
.float_state = { \
.float_format = _py_float_format_unknown, \
.double_format = _py_float_format_unknown, \
diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h
index 87b0f2ed8dfa8c..6d1b8702c77698 100644
--- a/Include/internal/pycore_runtime_init_generated.h
+++ b/Include/internal/pycore_runtime_init_generated.h
@@ -1043,6 +1043,7 @@ extern "C" {
INIT_ID(node_offset), \
INIT_ID(ns), \
INIT_ID(nstype), \
+ INIT_ID(nt), \
INIT_ID(null), \
INIT_ID(number), \
INIT_ID(obj), \
@@ -1081,6 +1082,7 @@ extern "C" {
INIT_ID(pos), \
INIT_ID(pos1), \
INIT_ID(pos2), \
+ INIT_ID(posix), \
INIT_ID(print_file_and_line), \
INIT_ID(priority), \
INIT_ID(progress), \
diff --git a/Include/internal/pycore_signal.h b/Include/internal/pycore_signal.h
index b921dd170e9f6f..ca3f69d09fc0c1 100644
--- a/Include/internal/pycore_signal.h
+++ b/Include/internal/pycore_signal.h
@@ -10,8 +10,11 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
+#include "pycore_atomic.h" // _Py_atomic_address
+
#include <signal.h> // NSIG
+
#ifdef _SIG_MAXSIG
// gh-91145: On FreeBSD, <signal.h> defines NSIG as 32: it doesn't include
// realtime signals: [SIGRTMIN,SIGRTMAX]. Use _SIG_MAXSIG instead. For
@@ -29,6 +32,66 @@ extern "C" {
# define Py_NSIG 64 // Use a reasonable default value
#endif
+#define INVALID_FD (-1)
+
+struct _signals_runtime_state {
+ volatile struct {
+ _Py_atomic_int tripped;
+ /* func is atomic to ensure that PyErr_SetInterrupt is async-signal-safe
+ * (even though it would probably be otherwise, anyway).
+ */
+ _Py_atomic_address func;
+ } handlers[Py_NSIG];
+
+ volatile struct {
+#ifdef MS_WINDOWS
+ /* This would be "SOCKET fd" if <winsock2.h> were always included.
+ It isn't so we must cast to SOCKET where appropriate. */
+ volatile int fd;
+#elif defined(__VXWORKS__)
+ int fd;
+#else
+ sig_atomic_t fd;
+#endif
+
+ int warn_on_full_buffer;
+#ifdef MS_WINDOWS
+ int use_send;
+#endif
+ } wakeup;
+
+ /* Speed up sigcheck() when none tripped */
+ _Py_atomic_int is_tripped;
+
+ /* These objects necessarily belong to the main interpreter. */
+ PyObject *default_handler;
+ PyObject *ignore_handler;
+
+#ifdef MS_WINDOWS
+ /* This would be "HANDLE sigint_event" if <windows.h> were always included.
+ It isn't so we must cast to HANDLE everywhere "sigint_event" is used. */
+ void *sigint_event;
+#endif
+
+ /* True if the main interpreter thread exited due to an unhandled
+ * KeyboardInterrupt exception, suggesting the user pressed ^C. */
+ int unhandled_keyboard_interrupt;
+};
+
+#ifdef MS_WINDOWS
+# define _signals_WAKEUP_INIT \
+ {.fd = INVALID_FD, .warn_on_full_buffer = 1, .use_send = 0}
+#else
+# define _signals_WAKEUP_INIT \
+ {.fd = INVALID_FD, .warn_on_full_buffer = 1}
+#endif
+
+#define _signals_RUNTIME_INIT \
+ { \
+ .wakeup = _signals_WAKEUP_INIT, \
+ }
+
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_time.h b/Include/internal/pycore_time.h
new file mode 100644
index 00000000000000..949170c4493799
--- /dev/null
+++ b/Include/internal/pycore_time.h
@@ -0,0 +1,25 @@
+#ifndef Py_INTERNAL_TIME_H
+#define Py_INTERNAL_TIME_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+
+struct _time_runtime_state {
+#ifdef HAVE_TIMES
+ int ticks_per_second_initialized;
+ long ticks_per_second;
+#else
+ int _not_used;
+#endif
+};
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_TIME_H */
diff --git a/Include/internal/pycore_tracemalloc.h b/Include/internal/pycore_tracemalloc.h
new file mode 100644
index 00000000000000..08d7d1096c78ce
--- /dev/null
+++ b/Include/internal/pycore_tracemalloc.h
@@ -0,0 +1,121 @@
+#ifndef Py_INTERNAL_TRACEMALLOC_H
+#define Py_INTERNAL_TRACEMALLOC_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#include "pycore_hashtable.h" // _Py_hashtable_t
+
+
+/* Trace memory blocks allocated by PyMem_RawMalloc() */
+#define TRACE_RAW_MALLOC
+
+
+struct _PyTraceMalloc_Config {
+ /* Module initialized?
+ Variable protected by the GIL */
+ enum {
+ TRACEMALLOC_NOT_INITIALIZED,
+ TRACEMALLOC_INITIALIZED,
+ TRACEMALLOC_FINALIZED
+ } initialized;
+
+ /* Is tracemalloc tracing memory allocations?
+ Variable protected by the GIL */
+ int tracing;
+
+ /* limit of the number of frames in a traceback, 1 by default.
+ Variable protected by the GIL. */
+ int max_nframe;
+};
+
+
+/* Pack the frame_t structure to reduce the memory footprint on 64-bit
+ architectures: 12 bytes instead of 16. */
+struct
+#ifdef __GNUC__
+__attribute__((packed))
+#elif defined(_MSC_VER)
+#pragma pack(push, 4)
+#endif
+tracemalloc_frame {
+ /* filename cannot be NULL: "<unknown>" is used if the Python frame
+ filename is NULL */
+ PyObject *filename;
+ unsigned int lineno;
+};
+#ifdef _MSC_VER
+#pragma pack(pop)
+#endif
+
+struct tracemalloc_traceback {
+ Py_uhash_t hash;
+ /* Number of frames stored */
+ uint16_t nframe;
+ /* Total number of frames the traceback had */
+ uint16_t total_nframe;
+ struct tracemalloc_frame frames[1];
+};
+
+
+struct _tracemalloc_runtime_state {
+ struct _PyTraceMalloc_Config config;
+
+ /* Protected by the GIL */
+ struct {
+ PyMemAllocatorEx mem;
+ PyMemAllocatorEx raw;
+ PyMemAllocatorEx obj;
+ } allocators;
+
+#if defined(TRACE_RAW_MALLOC)
+ PyThread_type_lock tables_lock;
+#endif
+ /* Size in bytes of currently traced memory.
+ Protected by TABLES_LOCK(). */
+ size_t traced_memory;
+ /* Peak size in bytes of traced memory.
+ Protected by TABLES_LOCK(). */
+ size_t peak_traced_memory;
+ /* Hash table used as a set to intern filenames:
+ PyObject* => PyObject*.
+ Protected by the GIL */
+ _Py_hashtable_t *filenames;
+ /* Buffer to store a new traceback in traceback_new().
+ Protected by the GIL. */
+ struct tracemalloc_traceback *traceback;
+ /* Hash table used as a set to intern tracebacks:
+ traceback_t* => traceback_t*
+ Protected by the GIL */
+ _Py_hashtable_t *tracebacks;
+ /* pointer (void*) => trace (trace_t*).
+ Protected by TABLES_LOCK(). */
+ _Py_hashtable_t *traces;
+ /* domain (unsigned int) => traces (_Py_hashtable_t).
+ Protected by TABLES_LOCK(). */
+ _Py_hashtable_t *domains;
+
+ struct tracemalloc_traceback empty_traceback;
+
+ Py_tss_t reentrant_key;
+};
+
+#define _tracemalloc_runtime_state_INIT \
+ { \
+ .config = { \
+ .initialized = TRACEMALLOC_NOT_INITIALIZED, \
+ .tracing = 0, \
+ .max_nframe = 1, \
+ }, \
+ .reentrant_key = Py_tss_NEEDS_INIT, \
+ }
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif // !Py_INTERNAL_TRACEMALLOC_H
diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h
index 504c36338d9e96..edc70843b57531 100644
--- a/Include/internal/pycore_tuple.h
+++ b/Include/internal/pycore_tuple.h
@@ -67,6 +67,13 @@ struct _Py_tuple_state {
extern PyObject *_PyTuple_FromArray(PyObject *const *, Py_ssize_t);
extern PyObject *_PyTuple_FromArraySteal(PyObject *const *, Py_ssize_t);
+
+typedef struct {
+ PyObject_HEAD
+ Py_ssize_t it_index;
+ PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
+} _PyTupleIterObject;
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_unicodeobject.h b/Include/internal/pycore_unicodeobject.h
index b315ca1ae5b64b..19faceebf1d8ee 100644
--- a/Include/internal/pycore_unicodeobject.h
+++ b/Include/internal/pycore_unicodeobject.h
@@ -9,6 +9,7 @@ extern "C" {
#endif
#include "pycore_fileutils.h" // _Py_error_handler
+#include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI
void _PyUnicode_ExactDealloc(PyObject *op);
@@ -52,6 +53,8 @@ struct _Py_unicode_ids {
struct _Py_unicode_state {
struct _Py_unicode_fs_codec fs_codec;
+ _PyUnicode_Name_CAPI *ucnhash_capi;
+
// Unicode identifiers (_Py_Identifier): see _PyUnicode_FromId()
struct _Py_unicode_ids ids;
};
diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h
index 80be342b5b3b44..7f407c0141b8a5 100644
--- a/Include/internal/pycore_unicodeobject_generated.h
+++ b/Include/internal/pycore_unicodeobject_generated.h
@@ -980,6 +980,8 @@ _PyUnicode_InitStaticStrings(void) {
PyUnicode_InternInPlace(&string);
string = &_Py_ID(nstype);
PyUnicode_InternInPlace(&string);
+ string = &_Py_ID(nt);
+ PyUnicode_InternInPlace(&string);
string = &_Py_ID(null);
PyUnicode_InternInPlace(&string);
string = &_Py_ID(number);
@@ -1056,6 +1058,8 @@ _PyUnicode_InitStaticStrings(void) {
PyUnicode_InternInPlace(&string);
string = &_Py_ID(pos2);
PyUnicode_InternInPlace(&string);
+ string = &_Py_ID(posix);
+ PyUnicode_InternInPlace(&string);
string = &_Py_ID(print_file_and_line);
PyUnicode_InternInPlace(&string);
string = &_Py_ID(priority);
diff --git a/Include/object.h b/Include/object.h
index 75624fe8c77a51..3774f126730005 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -598,15 +598,44 @@ static inline void Py_DECREF(PyObject *op)
* one of those can't cause problems -- but in part that relies on that
* Python integers aren't currently weakly referencable. Best practice is
* to use Py_CLEAR() even if you can't think of a reason for why you need to.
+ *
+ * gh-98724: Use a temporary variable to only evaluate the macro argument once,
+ * to avoid the duplication of side effects if the argument has side effects.
+ *
+ * gh-99701: If the PyObject* type is used with casting arguments to PyObject*,
+ * the code can be miscompiled with strict aliasing because of type punning.
+ * With strict aliasing, a compiler considers that two pointers of different
+ * types cannot read or write the same memory, which enables optimization
+ * opportunities.
+ *
+ * If available, use _Py_TYPEOF() to use the 'op' type for temporary variables,
+ * and so avoid type punning. Otherwise, use memcpy() which causes type erasure
+ * and so prevents the compiler from reusing an old cached 'op' value after
+ * Py_CLEAR().
*/
-#define Py_CLEAR(op) \
- do { \
- PyObject *_py_tmp = _PyObject_CAST(op); \
- if (_py_tmp != NULL) { \
- (op) = NULL; \
- Py_DECREF(_py_tmp); \
- } \
+#ifdef _Py_TYPEOF
+#define Py_CLEAR(op) \
+ do { \
+ _Py_TYPEOF(op)* _tmp_op_ptr = &(op); \
+ _Py_TYPEOF(op) _tmp_old_op = (*_tmp_op_ptr); \
+ if (_tmp_old_op != NULL) { \
+ *_tmp_op_ptr = _Py_NULL; \
+ Py_DECREF(_tmp_old_op); \
+ } \
} while (0)
+#else
+#define Py_CLEAR(op) \
+ do { \
+ PyObject **_tmp_op_ptr = _Py_CAST(PyObject**, &(op)); \
+ PyObject *_tmp_old_op = (*_tmp_op_ptr); \
+ if (_tmp_old_op != NULL) { \
+ PyObject *_null_ptr = _Py_NULL; \
+ memcpy(_tmp_op_ptr, &_null_ptr, sizeof(PyObject*)); \
+ Py_DECREF(_tmp_old_op); \
+ } \
+ } while (0)
+#endif
+
/* Function to use in case the object pointer can be NULL: */
static inline void Py_XINCREF(PyObject *op)
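
A short sketch of what the reworked Py_CLEAR() guarantees, assuming an illustrative PairObject container and a hypothetical clear_array() helper: the slot is set to NULL before the old value is released, and since the argument is now evaluated only once, clearing through an expression with side effects no longer expands the side effect twice.

    #include <Python.h>

    /* Illustrative container with a tp_clear-style function: clearing each
     * slot before decref'ing means code triggered by the teardown (e.g. a
     * __del__ or a weakref callback) never sees a dangling pointer through
     * this object. */
    typedef struct {
        PyObject_HEAD
        PyObject *first;
        PyObject *second;
    } PairObject;

    static int
    pair_clear(PairObject *self)
    {
        Py_CLEAR(self->first);
        Py_CLEAR(self->second);
        return 0;
    }

    /* With the 3.12 macro, the argument is evaluated exactly once, so an
     * expression with side effects is safe: */
    static void
    clear_array(PyObject **items, Py_ssize_t n)
    {
        Py_ssize_t i = 0;
        while (i < n) {
            Py_CLEAR(items[i++]);   /* i++ happens exactly once per iteration */
        }
    }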
diff --git a/Include/opcode.h b/Include/opcode.h
index f284313d2ed756..888250ed37e8cb 100644
--- a/Include/opcode.h
+++ b/Include/opcode.h
@@ -162,34 +162,35 @@ extern "C" {
#define COMPARE_OP_INT_JUMP 57
#define COMPARE_OP_STR_JUMP 58
#define FOR_ITER_LIST 59
-#define FOR_ITER_RANGE 62
-#define FOR_ITER_GEN 64
-#define LOAD_ATTR_CLASS 65
-#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 66
-#define LOAD_ATTR_INSTANCE_VALUE 67
-#define LOAD_ATTR_MODULE 72
-#define LOAD_ATTR_PROPERTY 73
-#define LOAD_ATTR_SLOT 76
-#define LOAD_ATTR_WITH_HINT 77
-#define LOAD_ATTR_METHOD_LAZY_DICT 78
-#define LOAD_ATTR_METHOD_NO_DICT 79
-#define LOAD_ATTR_METHOD_WITH_DICT 80
-#define LOAD_ATTR_METHOD_WITH_VALUES 81
-#define LOAD_CONST__LOAD_FAST 86
-#define LOAD_FAST__LOAD_CONST 113
-#define LOAD_FAST__LOAD_FAST 121
-#define LOAD_GLOBAL_BUILTIN 141
-#define LOAD_GLOBAL_MODULE 143
-#define STORE_ATTR_INSTANCE_VALUE 153
-#define STORE_ATTR_SLOT 154
-#define STORE_ATTR_WITH_HINT 158
-#define STORE_FAST__LOAD_FAST 159
-#define STORE_FAST__STORE_FAST 160
-#define STORE_SUBSCR_DICT 161
-#define STORE_SUBSCR_LIST_INT 166
-#define UNPACK_SEQUENCE_LIST 167
-#define UNPACK_SEQUENCE_TUPLE 168
-#define UNPACK_SEQUENCE_TWO_TUPLE 169
+#define FOR_ITER_TUPLE 62
+#define FOR_ITER_RANGE 64
+#define FOR_ITER_GEN 65
+#define LOAD_ATTR_CLASS 66
+#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 67
+#define LOAD_ATTR_INSTANCE_VALUE 72
+#define LOAD_ATTR_MODULE 73
+#define LOAD_ATTR_PROPERTY 76
+#define LOAD_ATTR_SLOT 77
+#define LOAD_ATTR_WITH_HINT 78
+#define LOAD_ATTR_METHOD_LAZY_DICT 79
+#define LOAD_ATTR_METHOD_NO_DICT 80
+#define LOAD_ATTR_METHOD_WITH_DICT 81
+#define LOAD_ATTR_METHOD_WITH_VALUES 86
+#define LOAD_CONST__LOAD_FAST 113
+#define LOAD_FAST__LOAD_CONST 121
+#define LOAD_FAST__LOAD_FAST 141
+#define LOAD_GLOBAL_BUILTIN 143
+#define LOAD_GLOBAL_MODULE 153
+#define STORE_ATTR_INSTANCE_VALUE 154
+#define STORE_ATTR_SLOT 158
+#define STORE_ATTR_WITH_HINT 159
+#define STORE_FAST__LOAD_FAST 160
+#define STORE_FAST__STORE_FAST 161
+#define STORE_SUBSCR_DICT 166
+#define STORE_SUBSCR_LIST_INT 167
+#define UNPACK_SEQUENCE_LIST 168
+#define UNPACK_SEQUENCE_TUPLE 169
+#define UNPACK_SEQUENCE_TWO_TUPLE 170
#define DO_TRACING 255
#define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index a16b8d7104e3b6..3a7e3b47a88501 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -20,10 +20,10 @@
#define PY_MINOR_VERSION 12
#define PY_MICRO_VERSION 0
#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA
-#define PY_RELEASE_SERIAL 2
+#define PY_RELEASE_SERIAL 3
/* Version as a string */
-#define PY_VERSION "3.12.0a2+"
+#define PY_VERSION "3.12.0a3+"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
diff --git a/Include/pyport.h b/Include/pyport.h
index b3ff2f4882e90f..b1b2a74779691d 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -698,6 +698,15 @@ extern char * _getpty(int *, int, mode_t, int);
# define _Py__has_builtin(x) 0
#endif
+// _Py_TYPEOF(expr) gets the type of an expression.
+//
+// Example: _Py_TYPEOF(x) x_copy = (x);
+//
+// The macro is only defined if GCC or clang compiler is used.
+#if defined(__GNUC__) || defined(__clang__)
+# define _Py_TYPEOF(expr) __typeof__(expr)
+#endif
+
/* A convenient way for code to know if sanitizers are enabled. */
#if defined(__has_feature)
diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py
index 2836bbcc463fe5..34a8869dff8def 100644
--- a/Lib/asyncio/events.py
+++ b/Lib/asyncio/events.py
@@ -619,7 +619,7 @@ def get_event_loop(self):
Returns an event loop object implementing the BaseEventLoop interface,
or raises an exception in case no event loop has been set for the
- current context and the current policy does not specify to create one.
+ current context.
It should never return None."""
raise NotImplementedError
@@ -672,11 +672,6 @@ def get_event_loop(self):
Returns an instance of EventLoop or raises an exception.
"""
- if (self._local._loop is None and
- not self._local._set_called and
- threading.current_thread() is threading.main_thread()):
- self.set_event_loop(self.new_event_loop())
-
if self._local._loop is None:
raise RuntimeError('There is no current event loop in thread %r.'
% threading.current_thread().name)
@@ -786,16 +781,9 @@ def get_event_loop():
the result of `get_event_loop_policy().get_event_loop()` call.
"""
# NOTE: this function is implemented in C (see _asynciomodule.c)
- return _py__get_event_loop()
-
-
-def _get_event_loop(stacklevel=3):
current_loop = _get_running_loop()
if current_loop is not None:
return current_loop
- import warnings
- warnings.warn('There is no current event loop',
- DeprecationWarning, stacklevel=stacklevel)
return get_event_loop_policy().get_event_loop()
@@ -825,7 +813,6 @@ def set_child_watcher(watcher):
_py__set_running_loop = _set_running_loop
_py_get_running_loop = get_running_loop
_py_get_event_loop = get_event_loop
-_py__get_event_loop = _get_event_loop
try:
@@ -833,7 +820,7 @@ def set_child_watcher(watcher):
# functions in asyncio. Pure Python implementation is
# about 4 times slower than C-accelerated.
from _asyncio import (_get_running_loop, _set_running_loop,
- get_running_loop, get_event_loop, _get_event_loop)
+ get_running_loop, get_event_loop)
except ImportError:
pass
else:
@@ -842,7 +829,6 @@ def set_child_watcher(watcher):
_c__set_running_loop = _set_running_loop
_c_get_running_loop = get_running_loop
_c_get_event_loop = get_event_loop
- _c__get_event_loop = _get_event_loop
if hasattr(os, 'fork'):
diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py
index 3a6b44a0910869..97fc4e3fcb60ee 100644
--- a/Lib/asyncio/futures.py
+++ b/Lib/asyncio/futures.py
@@ -77,7 +77,7 @@ def __init__(self, *, loop=None):
the default event loop.
"""
if loop is None:
- self._loop = events._get_event_loop()
+ self._loop = events.get_event_loop()
else:
self._loop = loop
self._callbacks = []
@@ -413,7 +413,7 @@ def wrap_future(future, *, loop=None):
assert isinstance(future, concurrent.futures.Future), \
f'concurrent.futures.Future is expected, got {future!r}'
if loop is None:
- loop = events._get_event_loop()
+ loop = events.get_event_loop()
new_future = loop.create_future()
_chain_future(future, new_future)
return new_future
diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py
index c6aab408fc7410..1e2a730cf368a9 100644
--- a/Lib/asyncio/proactor_events.py
+++ b/Lib/asyncio/proactor_events.py
@@ -288,7 +288,8 @@ def _loop_reading(self, fut=None):
# we got end-of-file so no need to reschedule a new read
return
- data = self._data[:length]
+ # It's a new slice, so make it immutable so that protocols upstream don't have problems
+ data = bytes(memoryview(self._data)[:length])
else:
# the future will be replaced by next proactor.recv call
fut.cancel()
diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py
index c4d837a1170819..0f9098b4195633 100644
--- a/Lib/asyncio/streams.py
+++ b/Lib/asyncio/streams.py
@@ -125,7 +125,7 @@ class FlowControlMixin(protocols.Protocol):
def __init__(self, loop=None):
if loop is None:
- self._loop = events._get_event_loop(stacklevel=4)
+ self._loop = events.get_event_loop()
else:
self._loop = loop
self._paused = False
@@ -404,7 +404,7 @@ def __init__(self, limit=_DEFAULT_LIMIT, loop=None):
self._limit = limit
if loop is None:
- self._loop = events._get_event_loop()
+ self._loop = events.get_event_loop()
else:
self._loop = loop
self._buffer = bytearray()
@@ -688,7 +688,7 @@ async def read(self, n=-1):
await self._wait_for_data('read')
# This will work right even if buffer is less than n bytes
- data = bytes(self._buffer[:n])
+ data = bytes(memoryview(self._buffer)[:n])
del self._buffer[:n]
self._maybe_resume_transport()
@@ -730,7 +730,7 @@ async def readexactly(self, n):
data = bytes(self._buffer)
self._buffer.clear()
else:
- data = bytes(self._buffer[:n])
+ data = bytes(memoryview(self._buffer)[:n])
del self._buffer[:n]
self._maybe_resume_transport()
return data
diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py
index 571013745aa03a..fa853283c0c5e4 100644
--- a/Lib/asyncio/tasks.py
+++ b/Lib/asyncio/tasks.py
@@ -582,7 +582,7 @@ def as_completed(fs, *, timeout=None):
from .queues import Queue # Import here to avoid circular import problem.
done = Queue()
- loop = events._get_event_loop()
+ loop = events.get_event_loop()
todo = {ensure_future(f, loop=loop) for f in set(fs)}
timeout_handle = None
@@ -668,7 +668,7 @@ def _ensure_future(coro_or_future, *, loop=None):
'is required')
if loop is None:
- loop = events._get_event_loop(stacklevel=4)
+ loop = events.get_event_loop()
try:
return loop.create_task(coro_or_future)
except RuntimeError:
@@ -749,7 +749,7 @@ def gather(*coros_or_futures, return_exceptions=False):
gather won't cancel any other awaitables.
"""
if not coros_or_futures:
- loop = events._get_event_loop()
+ loop = events.get_event_loop()
outer = loop.create_future()
outer.set_result([])
return outer
diff --git a/Lib/csv.py b/Lib/csv.py
index 309a8f3f486365..4ef8be45ca9e0a 100644
--- a/Lib/csv.py
+++ b/Lib/csv.py
@@ -139,7 +139,8 @@ def __init__(self, f, fieldnames, restval="", extrasaction="raise",
fieldnames = list(fieldnames)
self.fieldnames = fieldnames # list of keys for the dict
self.restval = restval # for writing short dicts
- if extrasaction.lower() not in ("raise", "ignore"):
+ extrasaction = extrasaction.lower()
+ if extrasaction not in ("raise", "ignore"):
raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
% extrasaction)
self.extrasaction = extrasaction
diff --git a/Lib/enum.py b/Lib/enum.py
index 1b683c702d59b4..a0cad066dc23f7 100644
--- a/Lib/enum.py
+++ b/Lib/enum.py
@@ -436,7 +436,9 @@ def __setitem__(self, key, value):
if isinstance(value, auto):
single = True
value = (value, )
- if isinstance(value, tuple):
+ if type(value) is tuple and any(isinstance(v, auto) for v in value):
+ # insist on an actual tuple, no subclasses, in keeping with only supporting
+ # top-level auto() usage (not contained in any other data structure)
auto_valued = []
for v in value:
if isinstance(v, auto):
@@ -955,7 +957,15 @@ def _find_data_repr_(mcls, class_name, bases):
return base._value_repr_
elif '__repr__' in base.__dict__:
# this is our data repr
- return base.__dict__['__repr__']
+ # double-check if a dataclass with a default __repr__
+ if (
+ '__dataclass_fields__' in base.__dict__
+ and '__dataclass_params__' in base.__dict__
+ and base.__dict__['__dataclass_params__'].repr
+ ):
+ return _dataclass_repr
+ else:
+ return base.__dict__['__repr__']
return None
@classmethod
@@ -1046,20 +1056,20 @@ class Enum(metaclass=EnumType):
Access them by:
- - attribute access::
+ - attribute access:
- >>> Color.RED
- <Color.RED: 1>
+ >>> Color.RED
+ <Color.RED: 1>
- value lookup:
- >>> Color(1)
- <Color.RED: 1>
+ >>> Color(1)
+ <Color.RED: 1>
- name lookup:
- >>> Color['RED']
- <Color.RED: 1>
+ >>> Color['RED']
+ <Color.RED: 1>
Enumerations can be iterated over, and know how many members they have:
@@ -1551,6 +1561,14 @@ def _power_of_two(value):
return False
return value == 2 ** _high_bit(value)
+def _dataclass_repr(self):
+ dcf = self.__dataclass_fields__
+ return ', '.join(
+ '%s=%r' % (k, getattr(self, k))
+ for k in dcf.keys()
+ if dcf[k].repr
+ )
+
def global_enum_repr(self):
"""
use module.enum_name instead of class.enum_name
diff --git a/Lib/http/server.py b/Lib/http/server.py
index 8aee31bac2752a..8acabff605e795 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -93,6 +93,7 @@
import html
import http.client
import io
+import itertools
import mimetypes
import os
import posixpath
@@ -562,6 +563,11 @@ def log_error(self, format, *args):
self.log_message(format, *args)
+ # https://en.wikipedia.org/wiki/List_of_Unicode_characters#Control_codes
+ _control_char_table = str.maketrans(
+ {c: fr'\x{c:02x}' for c in itertools.chain(range(0x20), range(0x7f,0xa0))})
+ _control_char_table[ord('\\')] = r'\\'
+
def log_message(self, format, *args):
"""Log an arbitrary message.
@@ -577,12 +583,16 @@ def log_message(self, format, *args):
The client ip and current date/time are prefixed to
every message.
+ Unicode control characters are replaced with escaped hex
+ before writing the output to stderr.
+
"""
+ message = format % args
sys.stderr.write("%s - - [%s] %s\n" %
(self.address_string(),
self.log_date_time_string(),
- format%args))
+ message.translate(self._control_char_table)))
def version_string(self):
"""Return the server software version string."""
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 31ac888126b57c..e165937e448a95 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -1160,7 +1160,6 @@ def __init__(self):
self.started = False
self.passline = False
self.indecorator = False
- self.decoratorhasargs = False
self.last = 1
self.body_col0 = None
@@ -1175,13 +1174,6 @@ def tokeneater(self, type, token, srowcol, erowcol, line):
self.islambda = True
self.started = True
self.passline = True # skip to the end of the line
- elif token == "(":
- if self.indecorator:
- self.decoratorhasargs = True
- elif token == ")":
- if self.indecorator:
- self.indecorator = False
- self.decoratorhasargs = False
elif type == tokenize.NEWLINE:
self.passline = False # stop skipping when a NEWLINE is seen
self.last = srowcol[0]
@@ -1189,7 +1181,7 @@ def tokeneater(self, type, token, srowcol, erowcol, line):
raise EndOfBlock
# hitting a NEWLINE when in a decorator without args
# ends the decorator
- if self.indecorator and not self.decoratorhasargs:
+ if self.indecorator:
self.indecorator = False
elif self.passline:
pass
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index 873c884c3bd934..265eaa8d4b953f 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -663,12 +663,15 @@ def _getfinalpathname_nonstrict(path):
# 21: ERROR_NOT_READY (implies drive with no media)
# 32: ERROR_SHARING_VIOLATION (probably an NTFS paging file)
# 50: ERROR_NOT_SUPPORTED
+ # 53: ERROR_BAD_NETPATH
+ # 65: ERROR_NETWORK_ACCESS_DENIED
# 67: ERROR_BAD_NET_NAME (implies remote server unavailable)
# 87: ERROR_INVALID_PARAMETER
# 123: ERROR_INVALID_NAME
+ # 161: ERROR_BAD_PATHNAME
# 1920: ERROR_CANT_ACCESS_FILE
# 1921: ERROR_CANT_RESOLVE_FILENAME (implies unfollowable symlink)
- allowed_winerror = 1, 2, 3, 5, 21, 32, 50, 67, 87, 123, 1920, 1921
+ allowed_winerror = 1, 2, 3, 5, 21, 32, 50, 53, 65, 67, 87, 123, 161, 1920, 1921
# Non-strict algorithm is to find as much of the target directory
# as we can and join the rest.
diff --git a/Lib/opcode.py b/Lib/opcode.py
index fa6dbe5d24170c..fc57affbac5814 100644
--- a/Lib/opcode.py
+++ b/Lib/opcode.py
@@ -320,6 +320,7 @@ def pseudo_op(name, op, real_ops):
],
"FOR_ITER": [
"FOR_ITER_LIST",
+ "FOR_ITER_TUPLE",
"FOR_ITER_RANGE",
"FOR_ITER_GEN",
],
diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py
index a817dc3547fa93..4ba680cceda93a 100644
--- a/Lib/pydoc_data/topics.py
+++ b/Lib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Mon Nov 14 12:13:19 2022
+# Autogenerated by Sphinx on Tue Dec 6 19:31:49 2022
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
'\n'
@@ -11109,8 +11109,9 @@
'y)" is\n'
'typically invalid without special support in "MyClass". To '
'be able to\n'
- 'use that kind of patterns, the class needs to define a\n'
- '*__match_args__* attribute.\n'
+ 'use that kind of pattern, the class needs to define a '
+ '*__match_args__*\n'
+ 'attribute.\n'
'\n'
'object.__match_args__\n'
'\n'
@@ -11510,7 +11511,7 @@
'property\n'
' being one of “Lm”, “Lt”, “Lu”, “Ll”, or “Lo”. Note '
'that this is\n'
- ' different from the “Alphabetic” property defined in the '
+ ' different from the Alphabetic property defined in the '
'Unicode\n'
' Standard.\n'
'\n'
@@ -11559,9 +11560,9 @@
'according to the\n'
' language definition, section Identifiers and keywords.\n'
'\n'
- ' Call "keyword.iskeyword()" to test whether string "s" '
- 'is a reserved\n'
- ' identifier, such as "def" and "class".\n'
+ ' "keyword.iskeyword()" can be used to test whether '
+ 'string "s" is a\n'
+ ' reserved identifier, such as "def" and "class".\n'
'\n'
' Example:\n'
'\n'
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index a66f4f5b897cd3..2fa75eb4d11311 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -6042,5 +6042,5 @@ def test_semlock_subclass(self):
class SemLock(_multiprocessing.SemLock):
pass
name = f'test_semlock_subclass-{os.getpid()}'
- s = SemLock(1, 0, 10, name, 0)
+ s = SemLock(1, 0, 10, name, False)
_multiprocessing.sem_unlink(name)
diff --git a/Lib/test/inspect_fodder2.py b/Lib/test/inspect_fodder2.py
index e7d4b53ebefcc6..2dc49817087c44 100644
--- a/Lib/test/inspect_fodder2.py
+++ b/Lib/test/inspect_fodder2.py
@@ -259,3 +259,17 @@ def all_markers_with_args_and_kwargs(a, b, /, c, d, *args, e, f, **kwargs):
#line 259
def all_markers_with_defaults(a, b=1, /, c=2, d=3, *, e=4, f=5):
pass
+
+# line 263
+def deco_factory(**kwargs):
+ def deco(f):
+ @wraps(f)
+ def wrapper(*a, **kwd):
+ kwd.update(kwargs)
+ return f(*a, **kwd)
+ return wrapper
+ return deco
+
+@deco_factory(foo=(1 + 2), bar=lambda: 1)
+def complex_decorated(foo=0, bar=lambda: 0):
+ return foo + bar()
diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py
index 3eeef029b22d48..19ccf2db5e7f06 100644
--- a/Lib/test/libregrtest/main.py
+++ b/Lib/test/libregrtest/main.py
@@ -17,7 +17,8 @@
ChildError, DidNotRun)
from test.libregrtest.setup import setup_tests
from test.libregrtest.pgo import setup_pgo_tests
-from test.libregrtest.utils import removepy, count, format_duration, printlist
+from test.libregrtest.utils import (removepy, count, format_duration,
+ printlist, get_build_info)
from test import support
from test.support import os_helper
from test.support import threading_helper
@@ -491,6 +492,7 @@ def display_header(self):
print("==", platform.python_implementation(), *sys.version.split())
print("==", platform.platform(aliased=True),
"%s-endian" % sys.byteorder)
+ print("== Python build:", ' '.join(get_build_info()))
print("== cwd:", os.getcwd())
cpu_count = os.cpu_count()
if cpu_count:
diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py
index e6909170334e15..fb13fa0e243ba7 100644
--- a/Lib/test/libregrtest/utils.py
+++ b/Lib/test/libregrtest/utils.py
@@ -1,6 +1,7 @@
import math
import os.path
import sys
+import sysconfig
import textwrap
from test import support
@@ -208,3 +209,87 @@ def clear_caches():
pass
else:
fractions._hash_algorithm.cache_clear()
+
+
+def get_build_info():
+ # Get most important configure and build options as a list of strings.
+ # Example: ['debug', 'ASAN+MSAN'] or ['release', 'LTO+PGO'].
+
+ config_args = sysconfig.get_config_var('CONFIG_ARGS') or ''
+ cflags = sysconfig.get_config_var('PY_CFLAGS') or ''
+ cflags_nodist = sysconfig.get_config_var('PY_CFLAGS_NODIST') or ''
+ ldflags_nodist = sysconfig.get_config_var('PY_LDFLAGS_NODIST') or ''
+
+ build = []
+    if hasattr(sys, 'gettotalrefcount'):
+        # --with-pydebug
+        build.append('debug')
+
+        if '-DNDEBUG' in (cflags + cflags_nodist):
+            build.append('without_assert')
+    else:
+        build.append('release')
+
+        if '--with-assertions' in config_args:
+            build.append('with_assert')
+        elif '-DNDEBUG' not in (cflags + cflags_nodist):
+            build.append('with_assert')
+
+ # --enable-framework=name
+ framework = sysconfig.get_config_var('PYTHONFRAMEWORK')
+ if framework:
+ build.append(f'framework={framework}')
+
+ # --enable-shared
+ shared = int(sysconfig.get_config_var('PY_ENABLE_SHARED') or '0')
+ if shared:
+ build.append('shared')
+
+ # --with-lto
+ optimizations = []
+ if '-flto=thin' in ldflags_nodist:
+ optimizations.append('ThinLTO')
+ elif '-flto' in ldflags_nodist:
+ optimizations.append('LTO')
+
+ # --enable-optimizations
+ pgo_options = (
+ # GCC
+ '-fprofile-use',
+ # clang: -fprofile-instr-use=code.profclangd
+ '-fprofile-instr-use',
+ # ICC
+ "-prof-use",
+ )
+ if any(option in cflags_nodist for option in pgo_options):
+ optimizations.append('PGO')
+ if optimizations:
+ build.append('+'.join(optimizations))
+
+ # --with-address-sanitizer
+ sanitizers = []
+ if support.check_sanitizer(address=True):
+ sanitizers.append("ASAN")
+ # --with-memory-sanitizer
+ if support.check_sanitizer(memory=True):
+ sanitizers.append("MSAN")
+ # --with-undefined-behavior-sanitizer
+ if support.check_sanitizer(ub=True):
+ sanitizers.append("UBSAN")
+ if sanitizers:
+ build.append('+'.join(sanitizers))
+
+ # --with-trace-refs
+ if hasattr(sys, 'getobjects'):
+ build.append("TraceRefs")
+ # --enable-pystats
+ if hasattr(sys, '_stats_on'):
+ build.append("pystats")
+ # --with-valgrind
+ if sysconfig.get_config_var('WITH_VALGRIND'):
+ build.append("valgrind")
+ # --with-dtrace
+ if sysconfig.get_config_var('WITH_DTRACE'):
+ build.append("dtrace")
+
+ return build
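
The same sysconfig probing can be run on any interpreter as a rough sketch of the new header line; this reports only a subset of what get_build_info() collects, and the exact output depends on how the interpreter was configured.

import sys
import sysconfig

kind = 'debug' if hasattr(sys, 'gettotalrefcount') else 'release'
cflags = (sysconfig.get_config_var('PY_CFLAGS') or '')
cflags += ' ' + (sysconfig.get_config_var('PY_CFLAGS_NODIST') or '')
asserts = 'with_assert' if '-DNDEBUG' not in cflags else 'without_assert'
print('== Python build:', kind, asserts)
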
diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py
index 66f29b95af10c3..18900bb9f7162c 100644
--- a/Lib/test/test__xxsubinterpreters.py
+++ b/Lib/test/test__xxsubinterpreters.py
@@ -295,8 +295,8 @@ def clean_up_channels():
class TestBase(unittest.TestCase):
def tearDown(self):
- clean_up_interpreters()
clean_up_channels()
+ clean_up_interpreters()
##################################
@@ -386,7 +386,6 @@ def test_types(self):
self._assert_values([
b'spam',
9999,
- self.cid,
])
def test_bytes(self):
@@ -412,6 +411,15 @@ def test_non_shareable_int(self):
interpreters.channel_send(self.cid, i)
+class ModuleTests(TestBase):
+
+ def test_import_in_interpreter(self):
+ _run_output(
+ interpreters.create(),
+ 'import _xxsubinterpreters as _interpreters',
+ )
+
+
##################################
# interpreter tests
@@ -1213,6 +1221,18 @@ def test_equality(self):
self.assertFalse(cid1 != cid2)
self.assertTrue(cid1 != cid3)
+ def test_shareable(self):
+ chan = interpreters.channel_create()
+
+ obj = interpreters.channel_create()
+ interpreters.channel_send(chan, obj)
+ got = interpreters.channel_recv(chan)
+
+ self.assertEqual(got, obj)
+ self.assertIs(type(got), type(obj))
+ # XXX Check the following in the channel tests?
+ #self.assertIsNot(got, obj)
+
class ChannelTests(TestBase):
@@ -1545,6 +1565,19 @@ def test_recv_default(self):
self.assertEqual(obj5, b'eggs')
self.assertIs(obj6, default)
+ def test_recv_sending_interp_destroyed(self):
+ cid = interpreters.channel_create()
+ interp = interpreters.create()
+ interpreters.run_string(interp, dedent(f"""
+ import _xxsubinterpreters as _interpreters
+ _interpreters.channel_send({cid}, b'spam')
+ """))
+ interpreters.destroy(interp)
+
+ with self.assertRaisesRegex(RuntimeError,
+ 'unrecognized interpreter ID'):
+ interpreters.channel_recv(cid)
+
def test_run_string_arg_unresolved(self):
cid = interpreters.channel_create()
interp = interpreters.create()
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index 773fba87632b0a..ab6a63faa59085 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -837,7 +837,8 @@ def check_limit(prefix, repeated):
details = "Compiling ({!r} + {!r} * {})".format(
prefix, repeated, depth)
with self.assertRaises(RecursionError, msg=details):
- ast.parse(broken)
+ with support.infinite_recursion():
+ ast.parse(broken)
check_limit("a", "()")
check_limit("a", ".b")
diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py
index 7421d18dc636c8..3b4026cb73869a 100644
--- a/Lib/test/test_asyncio/test_base_events.py
+++ b/Lib/test/test_asyncio/test_base_events.py
@@ -746,7 +746,7 @@ async def coro():
def test_env_var_debug(self):
code = '\n'.join((
'import asyncio',
- 'loop = asyncio.get_event_loop()',
+ 'loop = asyncio.new_event_loop()',
'print(loop.get_debug())'))
# Test with -E to not fail if the unit test was run with
@@ -861,20 +861,15 @@ async def raise_keyboard_interrupt():
self.loop._process_events = mock.Mock()
- try:
+ with self.assertRaises(KeyboardInterrupt):
self.loop.run_until_complete(raise_keyboard_interrupt())
- except KeyboardInterrupt:
- pass
def func():
self.loop.stop()
func.called = True
func.called = False
- try:
- self.loop.call_soon(func)
- self.loop.run_forever()
- except KeyboardInterrupt:
- pass
+ self.loop.call_soon(self.loop.call_soon, func)
+ self.loop.run_forever()
self.assertTrue(func.called)
def test_single_selecter_event_callback_after_stopping(self):
diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py
index cabe75f56d9fb0..153b2de8172273 100644
--- a/Lib/test/test_asyncio/test_events.py
+++ b/Lib/test/test_asyncio/test_events.py
@@ -2550,29 +2550,8 @@ def test_event_loop_policy(self):
def test_get_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
self.assertIsNone(policy._local._loop)
-
- loop = policy.get_event_loop()
- self.assertIsInstance(loop, asyncio.AbstractEventLoop)
-
- self.assertIs(policy._local._loop, loop)
- self.assertIs(loop, policy.get_event_loop())
- loop.close()
-
- def test_get_event_loop_calls_set_event_loop(self):
- policy = asyncio.DefaultEventLoopPolicy()
-
- with mock.patch.object(
- policy, "set_event_loop",
- wraps=policy.set_event_loop) as m_set_event_loop:
-
- loop = policy.get_event_loop()
-
- # policy._local._loop must be set through .set_event_loop()
- # (the unix DefaultEventLoopPolicy needs this call to attach
- # the child watcher correctly)
- m_set_event_loop.assert_called_with(loop)
-
- loop.close()
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ policy.get_event_loop()
def test_get_event_loop_after_set_none(self):
policy = asyncio.DefaultEventLoopPolicy()
@@ -2599,7 +2578,8 @@ def test_new_event_loop(self):
def test_set_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
- old_loop = policy.get_event_loop()
+ old_loop = policy.new_event_loop()
+ policy.set_event_loop(old_loop)
self.assertRaises(TypeError, policy.set_event_loop, object())
@@ -2716,15 +2696,11 @@ def get_event_loop(self):
asyncio.set_event_loop_policy(Policy())
loop = asyncio.new_event_loop()
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(TestError):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaises(TestError):
+ asyncio.get_event_loop()
asyncio.set_event_loop(None)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(TestError):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaises(TestError):
+ asyncio.get_event_loop()
with self.assertRaisesRegex(RuntimeError, 'no running'):
asyncio.get_running_loop()
@@ -2738,16 +2714,11 @@ async def func():
loop.run_until_complete(func())
asyncio.set_event_loop(loop)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(TestError):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
-
+ with self.assertRaises(TestError):
+ asyncio.get_event_loop()
asyncio.set_event_loop(None)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(TestError):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaises(TestError):
+ asyncio.get_event_loop()
finally:
asyncio.set_event_loop_policy(old_policy)
@@ -2766,15 +2737,11 @@ def test_get_event_loop_returns_running_loop2(self):
loop = asyncio.new_event_loop()
self.addCleanup(loop.close)
- with self.assertWarns(DeprecationWarning) as cm:
- loop2 = asyncio.get_event_loop()
- self.addCleanup(loop2.close)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current'):
+ asyncio.get_event_loop()
asyncio.set_event_loop(None)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'no current'):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current'):
+ asyncio.get_event_loop()
with self.assertRaisesRegex(RuntimeError, 'no running'):
asyncio.get_running_loop()
@@ -2788,15 +2755,11 @@ async def func():
loop.run_until_complete(func())
asyncio.set_event_loop(loop)
- with self.assertWarns(DeprecationWarning) as cm:
- self.assertIs(asyncio.get_event_loop(), loop)
- self.assertEqual(cm.filename, __file__)
+ self.assertIs(asyncio.get_event_loop(), loop)
asyncio.set_event_loop(None)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'no current'):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current'):
+ asyncio.get_event_loop()
finally:
asyncio.set_event_loop_policy(old_policy)
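
The behaviour these tests now pin down can be reproduced directly: with no running loop and the current loop explicitly cleared, asyncio.get_event_loop() raises instead of warning and creating one. A minimal sketch (message wording may vary between versions):

import asyncio

# Explicitly clear the current loop, then ask for it outside of any coroutine.
asyncio.set_event_loop(None)
try:
    asyncio.get_event_loop()
except RuntimeError as exc:
    print('RuntimeError:', exc)   # mentions that there is no current event loop
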
diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py
index 83ea01c2452521..56b0b864de2ddf 100644
--- a/Lib/test/test_asyncio/test_futures.py
+++ b/Lib/test/test_asyncio/test_futures.py
@@ -146,10 +146,8 @@ def test_initial_state(self):
self.assertTrue(f.cancelled())
def test_constructor_without_loop(self):
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- self._new_future()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ self._new_future()
def test_constructor_use_running_loop(self):
async def test():
@@ -159,12 +157,10 @@ async def test():
self.assertIs(f.get_loop(), self.loop)
def test_constructor_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
- with self.assertWarns(DeprecationWarning) as cm:
- f = self._new_future()
- self.assertEqual(cm.filename, __file__)
+ f = self._new_future()
self.assertIs(f._loop, self.loop)
self.assertIs(f.get_loop(), self.loop)
@@ -500,10 +496,8 @@ def run(arg):
return (arg, threading.get_ident())
ex = concurrent.futures.ThreadPoolExecutor(1)
f1 = ex.submit(run, 'oi')
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(RuntimeError):
- asyncio.wrap_future(f1)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.wrap_future(f1)
ex.shutdown(wait=True)
def test_wrap_future_use_running_loop(self):
@@ -518,16 +512,14 @@ async def test():
ex.shutdown(wait=True)
def test_wrap_future_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
def run(arg):
return (arg, threading.get_ident())
ex = concurrent.futures.ThreadPoolExecutor(1)
f1 = ex.submit(run, 'oi')
- with self.assertWarns(DeprecationWarning) as cm:
- f2 = asyncio.wrap_future(f1)
- self.assertEqual(cm.filename, __file__)
+ f2 = asyncio.wrap_future(f1)
self.assertIs(self.loop, f2._loop)
ex.shutdown(wait=True)
diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py
index ae30185cef776a..6cb7dc300c5331 100644
--- a/Lib/test/test_asyncio/test_proactor_events.py
+++ b/Lib/test/test_asyncio/test_proactor_events.py
@@ -75,7 +75,10 @@ def test_loop_reading_data(self):
called_buf = bytearray(self.buffer_size)
called_buf[:len(buf)] = buf
self.loop._proactor.recv_into.assert_called_with(self.sock, called_buf)
- self.protocol.data_received.assert_called_with(bytearray(buf))
+ self.protocol.data_received.assert_called_with(buf)
+ # assert_called_with maps bytearray and bytes to the same thing so check manually
+ # regression test for https://github.com/python/cpython/issues/99941
+ self.assertIsInstance(self.protocol.data_received.call_args.args[0], bytes)
@unittest.skipIf(sys.flags.optimize, "Assertions are disabled in optimized mode")
def test_loop_reading_no_data(self):
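
The added isinstance check works around the fact that mock compares call arguments by equality, and bytes compares equal to an equal-valued bytearray; a small demonstration:

from unittest import mock

m = mock.Mock()
m(bytearray(b'data'))
m.assert_called_with(b'data')       # passes: bytearray(b'data') == b'data'
print(type(m.call_args.args[0]))    # <class 'bytearray'> -- hence the explicit type check
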
diff --git a/Lib/test/test_asyncio/test_ssl.py b/Lib/test/test_asyncio/test_ssl.py
index 5de9b7a14e87da..aaf3c37101f52a 100644
--- a/Lib/test/test_asyncio/test_ssl.py
+++ b/Lib/test/test_asyncio/test_ssl.py
@@ -1689,7 +1689,7 @@ def stop(self):
def run(self):
try:
with self._sock:
- self._sock.setblocking(0)
+ self._sock.setblocking(False)
self._run()
finally:
self._s1.close()
diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py
index 01d5407a497a04..7f9dc621808358 100644
--- a/Lib/test/test_asyncio/test_streams.py
+++ b/Lib/test/test_asyncio/test_streams.py
@@ -816,10 +816,8 @@ def test_read_all_from_pipe_reader(self):
self.assertEqual(data, b'data')
def test_streamreader_constructor_without_loop(self):
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- asyncio.StreamReader()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.StreamReader()
def test_streamreader_constructor_use_running_loop(self):
# asyncio issue #184: Ensure that StreamReaderProtocol constructor
@@ -833,21 +831,17 @@ async def test():
def test_streamreader_constructor_use_global_loop(self):
# asyncio issue #184: Ensure that StreamReaderProtocol constructor
# retrieves the current loop if the loop parameter is not set
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
self.addCleanup(asyncio.set_event_loop, None)
asyncio.set_event_loop(self.loop)
- with self.assertWarns(DeprecationWarning) as cm:
- reader = asyncio.StreamReader()
- self.assertEqual(cm.filename, __file__)
+ reader = asyncio.StreamReader()
self.assertIs(reader._loop, self.loop)
def test_streamreaderprotocol_constructor_without_loop(self):
reader = mock.Mock()
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- asyncio.StreamReaderProtocol(reader)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.StreamReaderProtocol(reader)
def test_streamreaderprotocol_constructor_use_running_loop(self):
# asyncio issue #184: Ensure that StreamReaderProtocol constructor
@@ -861,13 +855,11 @@ async def test():
def test_streamreaderprotocol_constructor_use_global_loop(self):
# asyncio issue #184: Ensure that StreamReaderProtocol constructor
# retrieves the current loop if the loop parameter is not set
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
self.addCleanup(asyncio.set_event_loop, None)
asyncio.set_event_loop(self.loop)
reader = mock.Mock()
- with self.assertWarns(DeprecationWarning) as cm:
- protocol = asyncio.StreamReaderProtocol(reader)
- self.assertEqual(cm.filename, __file__)
+ protocol = asyncio.StreamReaderProtocol(reader)
self.assertIs(protocol._loop, self.loop)
def test_multiple_drain(self):
diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py
index d8ba2f4e2a742a..5168b8250ef0a2 100644
--- a/Lib/test/test_asyncio/test_tasks.py
+++ b/Lib/test/test_asyncio/test_tasks.py
@@ -196,10 +196,8 @@ async def notmuch():
a = notmuch()
self.addCleanup(a.close)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- asyncio.ensure_future(a)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.ensure_future(a)
async def test():
return asyncio.ensure_future(notmuch())
@@ -209,12 +207,10 @@ async def test():
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
- with self.assertWarns(DeprecationWarning) as cm:
- t = asyncio.ensure_future(notmuch())
- self.assertEqual(cm.filename, __file__)
+ t = asyncio.ensure_future(notmuch())
self.assertIs(t._loop, self.loop)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
@@ -1532,10 +1528,8 @@ async def coro():
self.addCleanup(a.close)
futs = asyncio.as_completed([a])
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- list(futs)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ list(futs)
def test_as_completed_coroutine_use_running_loop(self):
loop = self.new_test_loop()
@@ -1965,10 +1959,8 @@ async def coro():
inner = coro()
self.addCleanup(inner.close)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- asyncio.shield(inner)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.shield(inner)
def test_shield_coroutine_use_running_loop(self):
async def coro():
@@ -1982,15 +1974,13 @@ async def test():
self.assertEqual(res, 42)
def test_shield_coroutine_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
async def coro():
return 42
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
- with self.assertWarns(DeprecationWarning) as cm:
- outer = asyncio.shield(coro())
- self.assertEqual(cm.filename, __file__)
+ outer = asyncio.shield(coro())
self.assertEqual(outer._loop, self.loop)
res = self.loop.run_until_complete(outer)
self.assertEqual(res, 42)
@@ -2102,8 +2092,8 @@ def test_cancel_gather_1(self):
async def create():
# The indirection fut->child_coro is needed since otherwise the
# gathering task is done at the same time as the child future
- def child_coro():
- return (yield from fut)
+ async def child_coro():
+ return await fut
gather_future = asyncio.gather(child_coro())
return asyncio.ensure_future(gather_future)
gather_task = loop.run_until_complete(create())
@@ -2827,7 +2817,7 @@ def test_current_task_no_running_loop(self):
self.assertIsNone(asyncio.current_task(loop=self.loop))
def test_current_task_no_running_loop_implicit(self):
- with self.assertRaises(RuntimeError):
+ with self.assertRaisesRegex(RuntimeError, 'no running event loop'):
asyncio.current_task()
def test_current_task_with_implicit_loop(self):
@@ -2991,10 +2981,8 @@ def _gather(self, *args, **kwargs):
return asyncio.gather(*args, **kwargs)
def test_constructor_empty_sequence_without_loop(self):
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(RuntimeError):
- asyncio.gather()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.gather()
def test_constructor_empty_sequence_use_running_loop(self):
async def gather():
@@ -3007,12 +2995,10 @@ async def gather():
self.assertEqual(fut.result(), [])
def test_constructor_empty_sequence_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
asyncio.set_event_loop(self.one_loop)
self.addCleanup(asyncio.set_event_loop, None)
- with self.assertWarns(DeprecationWarning) as cm:
- fut = asyncio.gather()
- self.assertEqual(cm.filename, __file__)
+ fut = asyncio.gather()
self.assertIsInstance(fut, asyncio.Future)
self.assertIs(fut._loop, self.one_loop)
self._run_loop(self.one_loop)
@@ -3100,10 +3086,8 @@ async def coro():
self.addCleanup(gen1.close)
gen2 = coro()
self.addCleanup(gen2.close)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(RuntimeError):
- asyncio.gather(gen1, gen2)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.gather(gen1, gen2)
def test_constructor_use_running_loop(self):
async def coro():
@@ -3117,16 +3101,14 @@ async def gather():
self.one_loop.run_until_complete(fut)
def test_constructor_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
async def coro():
return 'abc'
asyncio.set_event_loop(self.other_loop)
self.addCleanup(asyncio.set_event_loop, None)
gen1 = coro()
gen2 = coro()
- with self.assertWarns(DeprecationWarning) as cm:
- fut = asyncio.gather(gen1, gen2)
- self.assertEqual(cm.filename, __file__)
+ fut = asyncio.gather(gen1, gen2)
self.assertIs(fut._loop, self.other_loop)
self.other_loop.run_until_complete(fut)
diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py
index 092edb215854b7..600a5900da088d 100644
--- a/Lib/test/test_asyncio/test_unix_events.py
+++ b/Lib/test/test_asyncio/test_unix_events.py
@@ -1775,7 +1775,8 @@ def f():
def test_child_watcher_replace_mainloop_existing(self):
policy = self.create_policy()
- loop = policy.get_event_loop()
+ loop = policy.new_event_loop()
+ policy.set_event_loop(loop)
# Explicitly setup SafeChildWatcher,
# default ThreadedChildWatcher has no _loop property
@@ -1884,13 +1885,15 @@ async def test_fork_not_share_event_loop(self):
# child
try:
loop = asyncio.get_event_loop_policy().get_event_loop()
- os.write(w, str(id(loop)).encode())
+ except RuntimeError:
+ os.write(w, b'NO LOOP')
+ except:
+ os.write(w, b'ERROR:' + ascii(sys.exc_info()).encode())
finally:
os._exit(0)
else:
# parent
- child_loop = int(os.read(r, 100).decode())
- self.assertNotEqual(child_loop, id(loop))
+ self.assertEqual(os.read(r, 100), b'NO LOOP')
wait_process(pid, exitcode=0)
@hashlib_helper.requires_hashdigest('md5')
@@ -1907,7 +1910,6 @@ def test_fork_signal_handling(self):
def child_main():
signal.signal(signal.SIGTERM, lambda *args: child_handled.set())
child_started.set()
- time.sleep(1)
async def main():
loop = asyncio.get_running_loop()
diff --git a/Lib/test/test_call.py b/Lib/test/test_call.py
index f148b5ebbc5a9b..c17528be97b484 100644
--- a/Lib/test/test_call.py
+++ b/Lib/test/test_call.py
@@ -559,7 +559,7 @@ def __index__(self):
self.kwargs.clear()
gc.collect()
return 0
- x = IntWithDict(dont_inherit=IntWithDict())
+ x = IntWithDict(optimize=IntWithDict())
# We test the argument handling of "compile" here, the compilation
# itself is not relevant. When we pass flags=x below, x.__index__() is
# called, which changes the keywords dict.
diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py
index 1d30adaee9218f..06a51aa3cc219a 100644
--- a/Lib/test/test_capi/test_misc.py
+++ b/Lib/test/test_capi/test_misc.py
@@ -138,8 +138,9 @@ def test_seq_bytes_to_charp_array(self):
class Z(object):
def __len__(self):
return 1
- self.assertRaises(TypeError, _posixsubprocess.fork_exec,
- 1,Z(),3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23)
+ with self.assertRaisesRegex(TypeError, 'indexing'):
+ _posixsubprocess.fork_exec(
+ 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False)
# Issue #15736: overflow in _PySequence_BytesToCharpArray()
class Z(object):
def __len__(self):
@@ -147,7 +148,7 @@ def __len__(self):
def __getitem__(self, i):
return b'x'
self.assertRaises(MemoryError, _posixsubprocess.fork_exec,
- 1,Z(),3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23)
+ 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False)
@unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
def test_subprocess_fork_exec(self):
@@ -157,7 +158,7 @@ def __len__(self):
# Issue #15738: crash in subprocess_fork_exec()
self.assertRaises(TypeError, _posixsubprocess.fork_exec,
- Z(),[b'1'],3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23)
+ Z(),[b'1'],True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False)
@unittest.skipIf(MISSING_C_DOCSTRINGS,
"Signature information for builtins requires docstrings")
diff --git a/Lib/test/test_capi/test_watchers.py b/Lib/test/test_capi/test_watchers.py
index 5e4f42a86006bd..1922614ef60558 100644
--- a/Lib/test/test_capi/test_watchers.py
+++ b/Lib/test/test_capi/test_watchers.py
@@ -336,6 +336,74 @@ def test_no_more_ids_available(self):
self.add_watcher()
+class TestCodeObjectWatchers(unittest.TestCase):
+ @contextmanager
+ def code_watcher(self, which_watcher):
+ wid = _testcapi.add_code_watcher(which_watcher)
+ try:
+ yield wid
+ finally:
+ _testcapi.clear_code_watcher(wid)
+
+ def assert_event_counts(self, exp_created_0, exp_destroyed_0,
+ exp_created_1, exp_destroyed_1):
+ self.assertEqual(
+ exp_created_0, _testcapi.get_code_watcher_num_created_events(0))
+ self.assertEqual(
+ exp_destroyed_0, _testcapi.get_code_watcher_num_destroyed_events(0))
+ self.assertEqual(
+ exp_created_1, _testcapi.get_code_watcher_num_created_events(1))
+ self.assertEqual(
+ exp_destroyed_1, _testcapi.get_code_watcher_num_destroyed_events(1))
+
+ def test_code_object_events_dispatched(self):
+ # verify that all counts are zero before any watchers are registered
+ self.assert_event_counts(0, 0, 0, 0)
+
+ # verify that all counts remain zero when a code object is
+ # created and destroyed with no watchers registered
+ co1 = _testcapi.code_newempty("test_watchers", "dummy1", 0)
+ self.assert_event_counts(0, 0, 0, 0)
+ del co1
+ self.assert_event_counts(0, 0, 0, 0)
+
+ # verify counts are as expected when first watcher is registered
+ with self.code_watcher(0):
+ self.assert_event_counts(0, 0, 0, 0)
+ co2 = _testcapi.code_newempty("test_watchers", "dummy2", 0)
+ self.assert_event_counts(1, 0, 0, 0)
+ del co2
+ self.assert_event_counts(1, 1, 0, 0)
+
+ # again with second watcher registered
+ with self.code_watcher(1):
+ self.assert_event_counts(1, 1, 0, 0)
+ co3 = _testcapi.code_newempty("test_watchers", "dummy3", 0)
+ self.assert_event_counts(2, 1, 1, 0)
+ del co3
+ self.assert_event_counts(2, 2, 1, 1)
+
+ # verify counts are reset and don't change after both watchers are cleared
+ co4 = _testcapi.code_newempty("test_watchers", "dummy4", 0)
+ self.assert_event_counts(0, 0, 0, 0)
+ del co4
+ self.assert_event_counts(0, 0, 0, 0)
+
+ def test_clear_out_of_range_watcher_id(self):
+ with self.assertRaisesRegex(ValueError, r"Invalid code watcher ID -1"):
+ _testcapi.clear_code_watcher(-1)
+ with self.assertRaisesRegex(ValueError, r"Invalid code watcher ID 8"):
+ _testcapi.clear_code_watcher(8) # CODE_MAX_WATCHERS = 8
+
+ def test_clear_unassigned_watcher_id(self):
+ with self.assertRaisesRegex(ValueError, r"No code watcher set for ID 1"):
+ _testcapi.clear_code_watcher(1)
+
+ def test_allocate_too_many_watchers(self):
+ with self.assertRaisesRegex(RuntimeError, r"no more code watcher IDs available"):
+ _testcapi.allocate_too_many_code_watchers()
+
+
class TestFuncWatchers(unittest.TestCase):
@contextmanager
def add_watcher(self, func):
diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py
index c838e95ad554e3..f10d72ea5547ee 100644
--- a/Lib/test/test_cmd_line_script.py
+++ b/Lib/test/test_cmd_line_script.py
@@ -753,6 +753,9 @@ def test_nonexisting_script(self):
self.assertNotEqual(proc.returncode, 0)
@unittest.skipUnless(os.path.exists('/dev/fd/0'), 'requires /dev/fd platform')
+ @unittest.skipIf(sys.platform.startswith("freebsd") and
+ os.stat("/dev").st_dev == os.stat("/dev/fd").st_dev,
+ "Requires fdescfs mounted on /dev/fd on FreeBSD")
def test_script_as_dev_fd(self):
# GH-87235: On macOS passing a non-trivial script to /dev/fd/N can cause
# problems because all open /dev/fd/N file descriptors share the same
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index 4e4d82314a9fb8..02ab8fbcdb0700 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -143,7 +143,7 @@
gc_collect)
from test.support.script_helper import assert_python_ok
from test.support import threading_helper
-from opcode import opmap
+from opcode import opmap, opname
COPY_FREE_VARS = opmap['COPY_FREE_VARS']
@@ -339,15 +339,19 @@ def func():
self.assertEqual(list(new_code.co_lines()), [])
def test_invalid_bytecode(self):
- def foo(): pass
- foo.__code__ = co = foo.__code__.replace(co_code=b'\xee\x00d\x00S\x00')
+ def foo():
+ pass
- with self.assertRaises(SystemError) as se:
- foo()
- self.assertEqual(
- f"{co.co_filename}:{co.co_firstlineno}: unknown opcode 238",
- str(se.exception))
+ # assert that opcode 238 is invalid
+ self.assertEqual(opname[238], '<238>')
+ # change first opcode to 0xee (=238)
+ foo.__code__ = foo.__code__.replace(
+ co_code=b'\xee' + foo.__code__.co_code[1:])
+
+ msg = f"unknown opcode 238"
+ with self.assertRaisesRegex(SystemError, msg):
+ foo()
@requires_debug_ranges()
def test_co_positions_artificial_instructions(self):
diff --git a/Lib/test/test_coroutines.py b/Lib/test/test_coroutines.py
index f91c9cc47741b5..43a3ff0536fe28 100644
--- a/Lib/test/test_coroutines.py
+++ b/Lib/test/test_coroutines.py
@@ -2418,7 +2418,8 @@ class UnawaitedWarningDuringShutdownTest(unittest.TestCase):
def test_unawaited_warning_during_shutdown(self):
code = ("import asyncio\n"
"async def f(): pass\n"
- "asyncio.gather(f())\n")
+ "async def t(): asyncio.gather(f())\n"
+ "asyncio.run(t())\n")
assert_python_ok("-c", code)
code = ("import sys\n"
diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py
index d64bff13a44e87..8289ddb1c3a54f 100644
--- a/Lib/test/test_csv.py
+++ b/Lib/test/test_csv.py
@@ -762,6 +762,10 @@ def test_write_field_not_in_field_names_raise(self):
dictrow = {'f0': 0, 'f1': 1, 'f2': 2, 'f3': 3}
self.assertRaises(ValueError, csv.DictWriter.writerow, writer, dictrow)
+ # see bpo-44512 (differently cased 'raise' should not result in 'ignore')
+ writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="RAISE")
+ self.assertRaises(ValueError, csv.DictWriter.writerow, writer, dictrow)
+
def test_write_field_not_in_field_names_ignore(self):
fileobj = StringIO()
writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="ignore")
@@ -769,6 +773,10 @@ def test_write_field_not_in_field_names_ignore(self):
csv.DictWriter.writerow(writer, dictrow)
self.assertEqual(fileobj.getvalue(), "1,2\r\n")
+ # bpo-44512
+ writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="IGNORE")
+ csv.DictWriter.writerow(writer, dictrow)
+
def test_dict_reader_fieldnames_accepts_iter(self):
fieldnames = ["a", "b", "c"]
f = StringIO()
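
Assuming the corresponding csv.DictWriter fix is in place, extrasaction is matched case-insensitively, so "IGNORE" behaves like "ignore"; a usage sketch:

import csv
import io

buf = io.StringIO()
writer = csv.DictWriter(buf, ['f1', 'f2'], extrasaction='IGNORE')
writer.writerow({'f1': 1, 'f2': 2, 'f3': 3})   # extra key 'f3' is silently dropped
print(repr(buf.getvalue()))                    # '1,2\r\n'
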
diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py
index b6082cf02b18d7..d876c8e5fb7798 100644
--- a/Lib/test/test_enum.py
+++ b/Lib/test/test_enum.py
@@ -2717,17 +2717,67 @@ def upper(self):
def test_repr_with_dataclass(self):
"ensure dataclass-mixin has correct repr()"
- from dataclasses import dataclass
- @dataclass
+ #
+ # check overridden dataclass __repr__ is used
+ #
+ from dataclasses import dataclass, field
+ @dataclass(repr=False)
class Foo:
__qualname__ = 'Foo'
a: int
+ def __repr__(self):
+ return 'ha hah!'
class Entries(Foo, Enum):
ENTRY1 = 1
self.assertTrue(isinstance(Entries.ENTRY1, Foo))
self.assertTrue(Entries._member_type_ is Foo, Entries._member_type_)
self.assertTrue(Entries.ENTRY1.value == Foo(1), Entries.ENTRY1.value)
-        self.assertEqual(repr(Entries.ENTRY1), '<Entries.ENTRY1: Foo(a=1)>')
+        self.assertEqual(repr(Entries.ENTRY1), '<Entries.ENTRY1: ha hah!>')
+ #
+ # check auto-generated dataclass __repr__ is not used
+ #
+ @dataclass
+ class CreatureDataMixin:
+ __qualname__ = 'CreatureDataMixin'
+ size: str
+ legs: int
+ tail: bool = field(repr=False, default=True)
+ class Creature(CreatureDataMixin, Enum):
+ __qualname__ = 'Creature'
+ BEETLE = ('small', 6)
+ DOG = ('medium', 4)
+        self.assertEqual(repr(Creature.DOG), "<Creature.DOG: size='medium', legs=4>")
+ #
+ # check inherited repr used
+ #
+ class Huh:
+ def __repr__(self):
+ return 'inherited'
+ @dataclass(repr=False)
+ class CreatureDataMixin(Huh):
+ __qualname__ = 'CreatureDataMixin'
+ size: str
+ legs: int
+ tail: bool = field(repr=False, default=True)
+ class Creature(CreatureDataMixin, Enum):
+ __qualname__ = 'Creature'
+ BEETLE = ('small', 6)
+ DOG = ('medium', 4)
+        self.assertEqual(repr(Creature.DOG), "<Creature.DOG: inherited>")
+ #
+ # check default object.__repr__ used if nothing provided
+ #
+ @dataclass(repr=False)
+ class CreatureDataMixin:
+ __qualname__ = 'CreatureDataMixin'
+ size: str
+ legs: int
+ tail: bool = field(repr=False, default=True)
+ class Creature(CreatureDataMixin, Enum):
+ __qualname__ = 'Creature'
+ BEETLE = ('small', 6)
+ DOG = ('medium', 4)
+        self.assertRegex(repr(Creature.DOG), "<Creature.DOG: <.*CreatureDataMixin object at .*>>")
def test_repr_with_init_data_type_mixin(self):
# non-data_type is a mixin that doesn't define __new__
@@ -2791,6 +2841,19 @@ class MyIntFlag(IntFlag):
self.assertEqual(deep, flags)
self.assertEqual(copied.value, 1 | 2 | 8)
+ def test_namedtuple_as_value(self):
+ from collections import namedtuple
+ TTuple = namedtuple('TTuple', 'id a blist')
+ class NTEnum(Enum):
+ NONE = TTuple(0, 0, [])
+ A = TTuple(1, 2, [4])
+ B = TTuple(2, 4, [0, 1, 2])
+        self.assertEqual(repr(NTEnum.NONE), "<NTEnum.NONE: TTuple(id=0, a=0, blist=[])>")
+ self.assertEqual(NTEnum.NONE.value, TTuple(id=0, a=0, blist=[]))
+ self.assertEqual(
+ [x.value for x in NTEnum],
+ [TTuple(id=0, a=0, blist=[]), TTuple(id=1, a=2, blist=[4]), TTuple(id=2, a=4, blist=[0, 1, 2])],
+ )
class TestOrder(unittest.TestCase):
"test usage of the `_order_` attribute"
@@ -4503,11 +4566,6 @@ class Quadruple(Enum):
COMPLEX_A = 2j
COMPLEX_B = 3j
-class _ModuleWrapper:
- """We use this class as a namespace for swapping modules."""
- def __init__(self, module):
- self.__dict__.update(module.__dict__)
-
class TestConvert(unittest.TestCase):
def tearDown(self):
# Reset the module-level test variables to their original integer
@@ -4547,12 +4605,6 @@ def test_convert_int(self):
self.assertEqual(test_type.CONVERT_TEST_NAME_D, 5)
self.assertEqual(test_type.CONVERT_TEST_NAME_E, 5)
# Ensure that test_type only picked up names matching the filter.
- int_dir = dir(int) + [
- 'CONVERT_TEST_NAME_A', 'CONVERT_TEST_NAME_B', 'CONVERT_TEST_NAME_C',
- 'CONVERT_TEST_NAME_D', 'CONVERT_TEST_NAME_E', 'CONVERT_TEST_NAME_F',
- 'CONVERT_TEST_SIGABRT', 'CONVERT_TEST_SIGIOT',
- 'CONVERT_TEST_EIO', 'CONVERT_TEST_EBUS',
- ]
extra = [name for name in dir(test_type) if name not in enum_dir(test_type)]
missing = [name for name in enum_dir(test_type) if name not in dir(test_type)]
self.assertEqual(
@@ -4594,7 +4646,6 @@ def test_convert_str(self):
self.assertEqual(test_type.CONVERT_STR_TEST_1, 'hello')
self.assertEqual(test_type.CONVERT_STR_TEST_2, 'goodbye')
# Ensure that test_type only picked up names matching the filter.
- str_dir = dir(str) + ['CONVERT_STR_TEST_1', 'CONVERT_STR_TEST_2']
extra = [name for name in dir(test_type) if name not in enum_dir(test_type)]
missing = [name for name in enum_dir(test_type) if name not in dir(test_type)]
self.assertEqual(
@@ -4662,8 +4713,6 @@ def member_dir(member):
allowed.add(name)
return sorted(allowed)
-missing = object()
-
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py
index a7db22007dedce..ed413f105e5b17 100644
--- a/Lib/test/test_frame.py
+++ b/Lib/test/test_frame.py
@@ -2,6 +2,7 @@
import re
import sys
import textwrap
+import threading
import types
import unittest
import weakref
@@ -11,6 +12,7 @@
_testcapi = None
from test import support
+from test.support import threading_helper
from test.support.script_helper import assert_python_ok
@@ -329,6 +331,46 @@ def f():
if old_enabled:
gc.enable()
+ @support.cpython_only
+ @threading_helper.requires_working_threading()
+ def test_sneaky_frame_object_teardown(self):
+
+ class SneakyDel:
+ def __del__(self):
+ """
+ Stash a reference to the entire stack for walking later.
+
+ It may look crazy, but you'd be surprised how common this is
+ when using a test runner (like pytest). The typical recipe is:
+ ResourceWarning + -Werror + a custom sys.unraisablehook.
+ """
+ nonlocal sneaky_frame_object
+ sneaky_frame_object = sys._getframe()
+
+ class SneakyThread(threading.Thread):
+ """
+ A separate thread isn't needed to make this code crash, but it does
+ make crashes more consistent, since it means sneaky_frame_object is
+ backed by freed memory after the thread completes!
+ """
+
+ def run(self):
+ """Run SneakyDel.__del__ as this frame is popped."""
+ ref = SneakyDel()
+
+ sneaky_frame_object = None
+ t = SneakyThread()
+ t.start()
+ t.join()
+ # sneaky_frame_object can be anything, really, but it's crucial that
+ # SneakyThread.run's frame isn't anywhere on the stack while it's being
+ # torn down:
+ self.assertIsNotNone(sneaky_frame_object)
+ while sneaky_frame_object is not None:
+ self.assertIsNot(
+ sneaky_frame_object.f_code, SneakyThread.run.__code__
+ )
+ sneaky_frame_object = sneaky_frame_object.f_back
@unittest.skipIf(_testcapi is None, 'need _testcapi')
class TestCAPI(unittest.TestCase):
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index a937258069ed89..ca078862cca6b9 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -26,7 +26,7 @@
import datetime
import threading
from unittest import mock
-from io import BytesIO
+from io import BytesIO, StringIO
import unittest
from test import support
@@ -990,6 +990,27 @@ def verify_http_server_response(self, response):
match = self.HTTPResponseMatch.search(response)
self.assertIsNotNone(match)
+ def test_unprintable_not_logged(self):
+ # We call the method from the class directly as our Socketless
+ # Handler subclass overrode it... nice for everything BUT this test.
+ self.handler.client_address = ('127.0.0.1', 1337)
+ log_message = BaseHTTPRequestHandler.log_message
+ with mock.patch.object(sys, 'stderr', StringIO()) as fake_stderr:
+ log_message(self.handler, '/foo')
+ log_message(self.handler, '/\033bar\000\033')
+ log_message(self.handler, '/spam %s.', 'a')
+ log_message(self.handler, '/spam %s.', '\033\x7f\x9f\xa0beans')
+ log_message(self.handler, '"GET /foo\\b"ar\007 HTTP/1.0"')
+ stderr = fake_stderr.getvalue()
+ self.assertNotIn('\033', stderr) # non-printable chars are caught.
+ self.assertNotIn('\000', stderr) # non-printable chars are caught.
+ lines = stderr.splitlines()
+ self.assertIn('/foo', lines[0])
+ self.assertIn(r'/\x1bbar\x00\x1b', lines[1])
+ self.assertIn('/spam a.', lines[2])
+ self.assertIn('/spam \\x1b\\x7f\\x9f\xa0beans.', lines[3])
+ self.assertIn(r'"GET /foo\\b"ar\x07 HTTP/1.0"', lines[4])
+
def test_http_1_1(self):
result = self.send_typical_request(b'GET / HTTP/1.1\r\n\r\n')
self.verify_http_server_response(result[0])
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index 3f5c299ce681c5..2b7977b1648f70 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -886,6 +886,12 @@ def test_class(self):
self.assertSourceEqual(self.fodderModule.X, 1, 2)
+class TestComplexDecorator(GetSourceBase):
+ fodderModule = mod2
+
+ def test_parens_in_decorator(self):
+ self.assertSourceEqual(self.fodderModule.complex_decorated, 273, 275)
+
class _BrokenDataDescriptor(object):
"""
A broken data descriptor. See bug #1785.
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
index a0a740fba8e8e3..b447b6cbab9c22 100644
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -161,11 +161,11 @@ def test_accumulate(self):
def test_batched(self):
self.assertEqual(list(batched('ABCDEFG', 3)),
- [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']])
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)])
self.assertEqual(list(batched('ABCDEFG', 2)),
- [['A', 'B'], ['C', 'D'], ['E', 'F'], ['G']])
+ [('A', 'B'), ('C', 'D'), ('E', 'F'), ('G',)])
self.assertEqual(list(batched('ABCDEFG', 1)),
- [['A'], ['B'], ['C'], ['D'], ['E'], ['F'], ['G']])
+ [('A',), ('B',), ('C',), ('D',), ('E',), ('F',), ('G',)])
with self.assertRaises(TypeError): # Too few arguments
list(batched('ABCDEFG'))
@@ -188,8 +188,8 @@ def test_batched(self):
with self.subTest(s=s, n=n, batches=batches):
# Order is preserved and no data is lost
self.assertEqual(''.join(chain(*batches)), s)
- # Each batch is an exact list
- self.assertTrue(all(type(batch) is list for batch in batches))
+ # Each batch is an exact tuple
+ self.assertTrue(all(type(batch) is tuple for batch in batches))
# All but the last batch is of size n
if batches:
last_batch = batches.pop()
@@ -675,6 +675,7 @@ def test_cycle(self):
self.assertRaises(TypeError, cycle, 5)
self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0])
+ def test_cycle_copy_pickle(self):
# check copy, deepcopy, pickle
c = cycle('abc')
self.assertEqual(next(c), 'a')
@@ -710,6 +711,37 @@ def test_cycle(self):
d = pickle.loads(p) # rebuild the cycle object
self.assertEqual(take(20, d), list('cdeabcdeabcdeabcdeab'))
+ def test_cycle_unpickle_compat(self):
+ testcases = [
+ b'citertools\ncycle\n(c__builtin__\niter\n((lI1\naI2\naI3\natRI1\nbtR((lI1\naI0\ntb.',
+ b'citertools\ncycle\n(c__builtin__\niter\n(](K\x01K\x02K\x03etRK\x01btR(]K\x01aK\x00tb.',
+ b'\x80\x02citertools\ncycle\nc__builtin__\niter\n](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01aK\x00\x86b.',
+ b'\x80\x03citertools\ncycle\ncbuiltins\niter\n](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01aK\x00\x86b.',
+ b'\x80\x04\x95=\x00\x00\x00\x00\x00\x00\x00\x8c\titertools\x8c\x05cycle\x93\x8c\x08builtins\x8c\x04iter\x93](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01aK\x00\x86b.',
+
+ b'citertools\ncycle\n(c__builtin__\niter\n((lp0\nI1\naI2\naI3\natRI1\nbtR(g0\nI1\ntb.',
+ b'citertools\ncycle\n(c__builtin__\niter\n(]q\x00(K\x01K\x02K\x03etRK\x01btR(h\x00K\x01tb.',
+ b'\x80\x02citertools\ncycle\nc__builtin__\niter\n]q\x00(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00K\x01\x86b.',
+ b'\x80\x03citertools\ncycle\ncbuiltins\niter\n]q\x00(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00K\x01\x86b.',
+ b'\x80\x04\x95<\x00\x00\x00\x00\x00\x00\x00\x8c\titertools\x8c\x05cycle\x93\x8c\x08builtins\x8c\x04iter\x93]\x94(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00K\x01\x86b.',
+
+ b'citertools\ncycle\n(c__builtin__\niter\n((lI1\naI2\naI3\natRI1\nbtR((lI1\naI00\ntb.',
+ b'citertools\ncycle\n(c__builtin__\niter\n(](K\x01K\x02K\x03etRK\x01btR(]K\x01aI00\ntb.',
+ b'\x80\x02citertools\ncycle\nc__builtin__\niter\n](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01a\x89\x86b.',
+ b'\x80\x03citertools\ncycle\ncbuiltins\niter\n](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01a\x89\x86b.',
+ b'\x80\x04\x95<\x00\x00\x00\x00\x00\x00\x00\x8c\titertools\x8c\x05cycle\x93\x8c\x08builtins\x8c\x04iter\x93](K\x01K\x02K\x03e\x85RK\x01b\x85R]K\x01a\x89\x86b.',
+
+ b'citertools\ncycle\n(c__builtin__\niter\n((lp0\nI1\naI2\naI3\natRI1\nbtR(g0\nI01\ntb.',
+ b'citertools\ncycle\n(c__builtin__\niter\n(]q\x00(K\x01K\x02K\x03etRK\x01btR(h\x00I01\ntb.',
+ b'\x80\x02citertools\ncycle\nc__builtin__\niter\n]q\x00(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00\x88\x86b.',
+ b'\x80\x03citertools\ncycle\ncbuiltins\niter\n]q\x00(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00\x88\x86b.',
+ b'\x80\x04\x95;\x00\x00\x00\x00\x00\x00\x00\x8c\titertools\x8c\x05cycle\x93\x8c\x08builtins\x8c\x04iter\x93]\x94(K\x01K\x02K\x03e\x85RK\x01b\x85Rh\x00\x88\x86b.',
+ ]
+ assert len(testcases) == 20
+ for t in testcases:
+ it = pickle.loads(t)
+ self.assertEqual(take(10, it), [2, 3, 1, 2, 3, 1, 2, 3, 1, 2])
+
def test_cycle_setstate(self):
# Verify both modes for restoring state
@@ -1777,12 +1809,12 @@ class TestPurePythonRoughEquivalents(unittest.TestCase):
def test_batched_recipe(self):
def batched_recipe(iterable, n):
- "Batch data into lists of length n. The last batch may be shorter."
+ "Batch data into tuples of length n. The last batch may be shorter."
# batched('ABCDEFG', 3) --> ABC DEF G
if n < 1:
raise ValueError('n must be at least one')
it = iter(iterable)
- while (batch := list(islice(it, n))):
+ while (batch := tuple(islice(it, n))):
yield batch
for iterable, n in product(
@@ -2055,7 +2087,7 @@ def test_accumulate(self):
def test_batched(self):
s = 'abcde'
- r = [['a', 'b'], ['c', 'd'], ['e']]
+ r = [('a', 'b'), ('c', 'd'), ('e',)]
n = 2
for g in (G, I, Ig, L, R):
with self.subTest(g=g):
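
For reference, the tuple-yielding behaviour the updated assertions describe looks like this on a 3.12 interpreter (sketch):

from itertools import batched   # new in Python 3.12

print(list(batched('ABCDEFG', 3)))
# [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
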
diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py
index 54c5a324897d23..3d9d6d5d0aca34 100644
--- a/Lib/test/test_marshal.py
+++ b/Lib/test/test_marshal.py
@@ -355,7 +355,7 @@ def test_deterministic_sets(self):
for elements in (
"float('nan'), b'a', b'b', b'c', 'x', 'y', 'z'",
# Also test for bad interactions with backreferencing:
- "('Spam', 0), ('Spam', 1), ('Spam', 2)",
+ "('Spam', 0), ('Spam', 1), ('Spam', 2), ('Spam', 3), ('Spam', 4), ('Spam', 5)",
):
s = f"{kind}([{elements}])"
with self.subTest(s):
diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py
index ef38c362103fc6..2ca3908bd1caac 100644
--- a/Lib/test/test_minidom.py
+++ b/Lib/test/test_minidom.py
@@ -1163,14 +1163,10 @@ def testEncodings(self):
# Verify that character decoding errors raise exceptions instead
# of crashing
- if pyexpat.version_info >= (2, 4, 5):
- self.assertRaises(ExpatError, parseString,
-                              b'<fran\xe7ais></fran\xe7ais>')
- self.assertRaises(ExpatError, parseString,
-                              b'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>')
- else:
- self.assertRaises(UnicodeDecodeError, parseString,
-                              b'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>')
+ with self.assertRaises((UnicodeDecodeError, ExpatError)):
+ parseString(
+                b'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>'
+ )
doc.unlink()
@@ -1631,13 +1627,11 @@ def testEmptyXMLNSValue(self):
self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE)
def testExceptionOnSpacesInXMLNSValue(self):
- if pyexpat.version_info >= (2, 4, 5):
- context = self.assertRaisesRegex(ExpatError, 'syntax error')
- else:
- context = self.assertRaisesRegex(ValueError, 'Unsupported syntax')
-
- with context:
-            parseString('<element xmlns:abc="http:abc.com/de f g/hi/j k"><abc:foo /></element>')
+ with self.assertRaises((ValueError, ExpatError)):
+ parseString(
+                '<element xmlns:abc="http:abc.com/de f g/hi/j k">' +
+                '<abc:foo /></element>'
+ )
def testDocRemoveChild(self):
doc = parse(tstfile)
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index 94db8bb7737acd..e0577916428a08 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -606,12 +606,13 @@ def test_stat_attributes_bytes(self):
def test_stat_result_pickle(self):
result = os.stat(self.fname)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- p = pickle.dumps(result, proto)
- self.assertIn(b'stat_result', p)
- if proto < 4:
- self.assertIn(b'cos\nstat_result\n', p)
- unpickled = pickle.loads(p)
- self.assertEqual(result, unpickled)
+ with self.subTest(f'protocol {proto}'):
+ p = pickle.dumps(result, proto)
+ self.assertIn(b'stat_result', p)
+ if proto < 4:
+ self.assertIn(b'cos\nstat_result\n', p)
+ unpickled = pickle.loads(p)
+ self.assertEqual(result, unpickled)
@unittest.skipUnless(hasattr(os, 'statvfs'), 'test needs os.statvfs()')
def test_statvfs_attributes(self):
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
index 442dec8b28065b..77f42f7f9c937b 100644
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -1646,12 +1646,6 @@ def test_resetids(self):
)
support.wait_process(pid, exitcode=0)
- def test_resetids_wrong_type(self):
- with self.assertRaises(TypeError):
- self.spawn_func(sys.executable,
- [sys.executable, "-c", "pass"],
- os.environ, resetids=None)
-
def test_setpgroup(self):
pid = self.spawn_func(
sys.executable,
diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py
index b0f11af1a7892e..6b1f22f66fd157 100644
--- a/Lib/test/test_struct.py
+++ b/Lib/test/test_struct.py
@@ -723,23 +723,56 @@ def test_issue35714(self):
struct.calcsize(s)
@support.cpython_only
- def test_issue45034_unsigned(self):
- _testcapi = import_helper.import_module('_testcapi')
- error_msg = f'ushort format requires 0 <= number <= {_testcapi.USHRT_MAX}'
- with self.assertRaisesRegex(struct.error, error_msg):
- struct.pack('H', 70000) # too large
- with self.assertRaisesRegex(struct.error, error_msg):
- struct.pack('H', -1) # too small
+ def test_issue98248(self):
+ def test_error_msg(prefix, int_type, is_unsigned):
+ fmt_str = prefix + int_type
+ size = struct.calcsize(fmt_str)
+ if is_unsigned:
+ max_ = 2 ** (size * 8) - 1
+ min_ = 0
+ else:
+ max_ = 2 ** (size * 8 - 1) - 1
+ min_ = -2 ** (size * 8 - 1)
+ error_msg = f"'{int_type}' format requires {min_} <= number <= {max_}"
+ for number in [int(-1e50), min_ - 1, max_ + 1, int(1e50)]:
+ with self.subTest(format_str=fmt_str, number=number):
+ with self.assertRaisesRegex(struct.error, error_msg):
+ struct.pack(fmt_str, number)
+ error_msg = "required argument is not an integer"
+ not_number = ""
+ with self.subTest(format_str=fmt_str, number=not_number):
+ with self.assertRaisesRegex(struct.error, error_msg):
+ struct.pack(fmt_str, not_number)
+
+ for prefix in '@=<>':
+ for int_type in 'BHILQ':
+ test_error_msg(prefix, int_type, True)
+ for int_type in 'bhilq':
+ test_error_msg(prefix, int_type, False)
+
+ int_type = 'N'
+ test_error_msg('@', int_type, True)
+
+ int_type = 'n'
+ test_error_msg('@', int_type, False)
@support.cpython_only
- def test_issue45034_signed(self):
- _testcapi = import_helper.import_module('_testcapi')
- error_msg = f'short format requires {_testcapi.SHRT_MIN} <= number <= {_testcapi.SHRT_MAX}'
- with self.assertRaisesRegex(struct.error, error_msg):
- struct.pack('h', 70000) # too large
- with self.assertRaisesRegex(struct.error, error_msg):
- struct.pack('h', -70000) # too small
-
+ def test_issue98248_error_propagation(self):
+ class Div0:
+ def __index__(self):
+ 1 / 0
+
+ def test_error_propagation(fmt_str):
+ with self.subTest(format_str=fmt_str, exception="ZeroDivisionError"):
+ with self.assertRaises(ZeroDivisionError):
+ struct.pack(fmt_str, Div0())
+
+ for prefix in '@=<>':
+ for int_type in 'BHILQbhilq':
+ test_error_propagation(prefix + int_type)
+
+ test_error_propagation('N')
+ test_error_propagation('n')
class UnpackIteratorTest(unittest.TestCase):
"""
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index 424a4a93b6f972..abd0dd8b25699b 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -2832,7 +2832,7 @@ def test_close_fds(self):
@unittest.skipIf(sys.platform.startswith("freebsd") and
os.stat("/dev").st_dev == os.stat("/dev/fd").st_dev,
- "Requires fdescfs mounted on /dev/fd on FreeBSD.")
+ "Requires fdescfs mounted on /dev/fd on FreeBSD")
def test_close_fds_when_max_fd_is_lowered(self):
"""Confirm that issue21618 is fixed (may fail under valgrind)."""
fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
@@ -3209,7 +3209,7 @@ def __int__(self):
1, 2, 3, 4,
True, True, 0,
None, None, None, -1,
- None, "no vfork")
+ None, True)
self.assertIn('fds_to_keep', str(c.exception))
finally:
if not gc_enabled:
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index 78cac231929a61..cb284195d976ff 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -2145,6 +2145,22 @@ def test_error_parenthesis(self):
for paren in ")]}":
self._check_error(paren + "1 + 2", f"unmatched '\\{paren}'")
+ # Some more complex examples:
+ code = """\
+func(
+ a=["unclosed], # Need a quote in this comment: "
+ b=2,
+)
+"""
+ self._check_error(code, "parenthesis '\\)' does not match opening parenthesis '\\['")
+
+ def test_error_string_literal(self):
+
+ self._check_error("'blech", "unterminated string literal")
+ self._check_error('"blech', "unterminated string literal")
+ self._check_error("'''blech", "unterminated triple-quoted string literal")
+ self._check_error('"""blech', "unterminated triple-quoted string literal")
+
def test_invisible_characters(self):
self._check_error('print\x17("Hello")', "invalid non-printable character")
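
The new checks match messages the tokenizer already emits; for example (exact wording may differ slightly between versions):

try:
    compile("'blech", "<example>", "exec")
except SyntaxError as exc:
    print(exc.msg)   # e.g. "unterminated string literal (detected at line 1)"
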
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index da602b0199d52c..1cae1b0de7140f 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -7719,6 +7719,7 @@ class CustomerModel:
"eq_default": True,
"order_default": False,
"kw_only_default": True,
+ "frozen_default": False,
"field_specifiers": (),
"kwargs": {},
}
@@ -7749,6 +7750,7 @@ class CustomerModel(Decorated, frozen=True):
"eq_default": True,
"order_default": True,
"kw_only_default": False,
+ "frozen_default": False,
"field_specifiers": (),
"kwargs": {"make_everything_awesome": True},
}
@@ -7765,7 +7767,7 @@ def __new__(
return super().__new__(cls, name, bases, namespace)
Decorated = dataclass_transform(
- order_default=True, field_specifiers=(Field,)
+ order_default=True, frozen_default=True, field_specifiers=(Field,)
)(ModelMeta)
class ModelBase(metaclass=Decorated): ...
@@ -7780,6 +7782,7 @@ class CustomerModel(ModelBase, init=False):
"eq_default": True,
"order_default": True,
"kw_only_default": False,
+ "frozen_default": True,
"field_specifiers": (Field,),
"kwargs": {},
}
diff --git a/Lib/test/test_unary.py b/Lib/test/test_unary.py
index c3c17cc9f611dd..a45fbf6bd6bc54 100644
--- a/Lib/test/test_unary.py
+++ b/Lib/test/test_unary.py
@@ -8,7 +8,6 @@ def test_negative(self):
self.assertTrue(-2 == 0 - 2)
self.assertEqual(-0, 0)
self.assertEqual(--2, 2)
- self.assertTrue(-2 == 0 - 2)
self.assertTrue(-2.0 == 0 - 2.0)
self.assertTrue(-2j == 0 - 2j)
@@ -16,15 +15,13 @@ def test_positive(self):
self.assertEqual(+2, 2)
self.assertEqual(+0, 0)
self.assertEqual(++2, 2)
- self.assertEqual(+2, 2)
self.assertEqual(+2.0, 2.0)
self.assertEqual(+2j, 2j)
def test_invert(self):
- self.assertTrue(-2 == 0 - 2)
- self.assertEqual(-0, 0)
- self.assertEqual(--2, 2)
- self.assertTrue(-2 == 0 - 2)
+ self.assertTrue(~2 == -(2+1))
+ self.assertEqual(~0, -1)
+ self.assertEqual(~~2, 2)
def test_no_overflow(self):
nines = "9" * 32
diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py
index a85bda3144bc35..74503c89e559a0 100644
--- a/Lib/test/test_unicodedata.py
+++ b/Lib/test/test_unicodedata.py
@@ -12,7 +12,8 @@
import unicodedata
import unittest
from test.support import (open_urlresource, requires_resource, script_helper,
- cpython_only, check_disallow_instantiation)
+ cpython_only, check_disallow_instantiation,
+ ResourceDenied)
class UnicodeMethodsTest(unittest.TestCase):
@@ -364,8 +365,8 @@ def test_normalization(self):
except PermissionError:
self.skipTest(f"Permission error when downloading {TESTDATAURL} "
f"into the test data directory")
- except (OSError, HTTPException):
- self.fail(f"Could not retrieve {TESTDATAURL}")
+ except (OSError, HTTPException) as exc:
+ self.skipTest(f"Failed to download {TESTDATAURL}: {exc}")
with testdata:
self.run_normalization_tests(testdata)
diff --git a/Lib/test/test_unittest/test_result.py b/Lib/test/test_unittest/test_result.py
index e71d114751d94d..efd9c902350506 100644
--- a/Lib/test/test_unittest/test_result.py
+++ b/Lib/test/test_unittest/test_result.py
@@ -275,6 +275,62 @@ def get_exc_info():
self.assertEqual(len(dropped), 1)
self.assertIn("raise self.failureException(msg)", dropped[0])
+ def test_addFailure_filter_traceback_frames_chained_exception_self_loop(self):
+ class Foo(unittest.TestCase):
+ def test_1(self):
+ pass
+
+ def get_exc_info():
+ try:
+ loop = Exception("Loop")
+ loop.__cause__ = loop
+ loop.__context__ = loop
+ raise loop
+ except:
+ return sys.exc_info()
+
+ exc_info_tuple = get_exc_info()
+
+ test = Foo('test_1')
+ result = unittest.TestResult()
+ result.startTest(test)
+ result.addFailure(test, exc_info_tuple)
+ result.stopTest(test)
+
+ formatted_exc = result.failures[0][1]
+ self.assertEqual(formatted_exc.count("Exception: Loop\n"), 1)
+
+ def test_addFailure_filter_traceback_frames_chained_exception_cycle(self):
+ class Foo(unittest.TestCase):
+ def test_1(self):
+ pass
+
+ def get_exc_info():
+ try:
+ # Create two directionally opposed cycles
+ # __cause__ in one direction, __context__ in the other
+ A, B, C = Exception("A"), Exception("B"), Exception("C")
+ edges = [(C, B), (B, A), (A, C)]
+ for ex1, ex2 in edges:
+ ex1.__cause__ = ex2
+ ex2.__context__ = ex1
+ raise C
+ except:
+ return sys.exc_info()
+
+ exc_info_tuple = get_exc_info()
+
+ test = Foo('test_1')
+ result = unittest.TestResult()
+ result.startTest(test)
+ result.addFailure(test, exc_info_tuple)
+ result.stopTest(test)
+
+ formatted_exc = result.failures[0][1]
+ self.assertEqual(formatted_exc.count("Exception: A\n"), 1)
+ self.assertEqual(formatted_exc.count("Exception: B\n"), 1)
+ self.assertEqual(formatted_exc.count("Exception: C\n"), 1)
+
# "addError(test, err)"
# ...
# "Called when the test case test raises an unexpected exception err
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index f067560ca6caa1..2df74f5e6f99b2 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -1104,6 +1104,8 @@ def test_unquoting(self):
self.assertEqual(result.count('%'), 1,
"using unquote(): not all characters escaped: "
"%s" % result)
+
+ def test_unquote_rejects_none_and_tuple(self):
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, None)
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, ())
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 28f88412fdcaac..498c0382d2137b 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -1824,6 +1824,10 @@ def test_HTTPError_interface(self):
expected_errmsg = '<HTTPError %s: %r>' % (err.code, err.msg)
self.assertEqual(repr(err), expected_errmsg)
+ def test_gh_98778(self):
+ x = urllib.error.HTTPError("url", 405, "METHOD NOT ALLOWED", None, None)
+ self.assertEqual(getattr(x, "__notes__", ()), ())
+
def test_parse_proxy(self):
parse_proxy_test_cases = [
('proxy.example.com',
diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py
index 8157c2da6efaa6..769ab67b0f5611 100644
--- a/Lib/test/test_winreg.py
+++ b/Lib/test/test_winreg.py
@@ -113,7 +113,6 @@ def _write_test_data(self, root_key, subkeystr="sub_key",
"does not close the actual key!")
except OSError:
pass
-
def _read_test_data(self, root_key, subkeystr="sub_key", OpenKey=OpenKey):
# Check we can get default value for this key.
val = QueryValue(root_key, test_key_name)
@@ -340,6 +339,23 @@ def test_setvalueex_value_range(self):
finally:
DeleteKey(HKEY_CURRENT_USER, test_key_name)
+ def test_setvalueex_negative_one_check(self):
+ # Test for Issue #43984, check -1 was not set by SetValueEx.
+ # Py2Reg, which gets called by SetValueEx, wasn't checking the return
+ # value of PyLong_AsUnsignedLong, and so could write -1 to the registry.
+ # The implementation now checks the PyLong_AsUnsignedLong return value to
+ # ensure that -1 is not written.
+ try:
+ with CreateKey(HKEY_CURRENT_USER, test_key_name) as ck:
+ with self.assertRaises(OverflowError):
+ SetValueEx(ck, "test_name_dword", None, REG_DWORD, -1)
+ SetValueEx(ck, "test_name_qword", None, REG_QWORD, -1)
+ self.assertRaises(FileNotFoundError, QueryValueEx, ck, "test_name_dword")
+ self.assertRaises(FileNotFoundError, QueryValueEx, ck, "test_name_qword")
+
+ finally:
+ DeleteKey(HKEY_CURRENT_USER, test_key_name)
+
def test_queryvalueex_return_value(self):
# Test for Issue #16759, return unsigned int from QueryValueEx.
# Reg2Py, which gets called by QueryValueEx, was returning a value
diff --git a/Lib/typing.py b/Lib/typing.py
index 38e227e3c55d59..d9d6fbcdb8f068 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -3363,6 +3363,7 @@ def dataclass_transform(
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
+ frozen_default: bool = False,
field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (),
**kwargs: Any,
) -> Callable[[T], T]:
@@ -3416,6 +3417,8 @@ class CustomerModel(ModelBase):
assumed to be True or False if it is omitted by the caller.
- ``kw_only_default`` indicates whether the ``kw_only`` parameter is
assumed to be True or False if it is omitted by the caller.
+ - ``frozen_default`` indicates whether the ``frozen`` parameter is
+ assumed to be True or False if it is omitted by the caller.
- ``field_specifiers`` specifies a static list of supported classes
or functions that describe fields, similar to ``dataclasses.field()``.
- Arbitrary other keyword arguments are accepted in order to allow for
@@ -3432,6 +3435,7 @@ def decorator(cls_or_fn):
"eq_default": eq_default,
"order_default": order_default,
"kw_only_default": kw_only_default,
+ "frozen_default": frozen_default,
"field_specifiers": field_specifiers,
"kwargs": kwargs,
}
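As a hedged illustration of the new parameter: a hypothetical decorator (``frozen_model`` below is not a real library API) can advertise to static type checkers that it freezes classes by default; at runtime the information is only recorded in ``__dataclass_transform__`` (Python 3.12+)::

    from typing import dataclass_transform

    @dataclass_transform(frozen_default=True)
    def frozen_model(cls):
        # A real implementation would synthesize __init__, __eq__, etc.
        return cls

    @frozen_model
    class Point:
        x: int
        y: int

    print(frozen_model.__dataclass_transform__["frozen_default"])  # True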
diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py
index 3da7005e603f4a..5ca4c23238b419 100644
--- a/Lib/unittest/result.py
+++ b/Lib/unittest/result.py
@@ -196,6 +196,7 @@ def _clean_tracebacks(self, exctype, value, tb, test):
ret = None
first = True
excs = [(exctype, value, tb)]
+ seen = {id(value)} # Detect loops in chained exceptions.
while excs:
(exctype, value, tb) = excs.pop()
# Skip test runner traceback levels
@@ -214,8 +215,9 @@ def _clean_tracebacks(self, exctype, value, tb, test):
if value is not None:
for c in (value.__cause__, value.__context__):
- if c is not None:
+ if c is not None and id(c) not in seen:
excs.append((type(c), c, c.__traceback__))
+ seen.add(id(c))
return ret
def _is_relevant_tb_level(self, tb):
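The ``seen`` set above guards the ``__cause__``/``__context__`` walk so that an exception appearing more than once in a chain is visited only once. The same traversal pattern, sketched in isolation::

    # Cycle-safe walk over chained exceptions, keyed by object identity.
    def iter_exception_chain(exc):
        seen = {id(exc)}
        stack = [exc]
        while stack:
            current = stack.pop()
            yield current
            for linked in (current.__cause__, current.__context__):
                if linked is not None and id(linked) not in seen:
                    seen.add(id(linked))
                    stack.append(linked)

    loop = Exception("Loop")
    loop.__cause__ = loop            # self-referencing chain, as in the tests
    print([str(e) for e in iter_exception_chain(loop)])   # ['Loop'], no hang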
diff --git a/Lib/urllib/error.py b/Lib/urllib/error.py
index 8cd901f13f8e49..feec0e7f848e46 100644
--- a/Lib/urllib/error.py
+++ b/Lib/urllib/error.py
@@ -10,7 +10,7 @@
an application may want to handle an exception like a regular
response.
"""
-
+import io
import urllib.response
__all__ = ['URLError', 'HTTPError', 'ContentTooShortError']
@@ -42,12 +42,9 @@ def __init__(self, url, code, msg, hdrs, fp):
self.hdrs = hdrs
self.fp = fp
self.filename = url
- # The addinfourl classes depend on fp being a valid file
- # object. In some cases, the HTTPError may not have a valid
- # file object. If this happens, the simplest workaround is to
- # not initialize the base classes.
- if fp is not None:
- self.__super_init(fp, hdrs, url, code)
+ if fp is None:
+ fp = io.StringIO()
+ self.__super_init(fp, hdrs, url, code)
def __str__(self):
return 'HTTP Error %s: %s' % (self.code, self.msg)
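With the change above, an ``HTTPError`` constructed without a body still initializes its ``addinfourl`` base class, so the file-like interface works instead of failing. A hedged sketch (no network access involved; the substituted body is an empty in-memory buffer)::

    import urllib.error

    err = urllib.error.HTTPError("http://example.com/", 405,
                                 "METHOD NOT ALLOWED", None, None)
    print(err.code, err.reason)   # 405 METHOD NOT ALLOWED
    print(repr(err.read()))       # '' -- empty placeholder body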
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index 4f6867accbc0eb..5f95c5ff7f9c1c 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -600,6 +600,9 @@ def urldefrag(url):
def unquote_to_bytes(string):
"""unquote_to_bytes('abc%20def') -> b'abc def'."""
+ return bytes(_unquote_impl(string))
+
+def _unquote_impl(string: bytes | bytearray | str) -> bytes | bytearray:
# Note: strings are encoded as UTF-8. This is only an issue if it contains
# unescaped non-ASCII characters, which URIs should not.
if not string:
@@ -611,8 +614,8 @@ def unquote_to_bytes(string):
bits = string.split(b'%')
if len(bits) == 1:
return string
- res = [bits[0]]
- append = res.append
+ res = bytearray(bits[0])
+ append = res.extend
# Delay the initialization of the table to not waste memory
# if the function is never called
global _hextobyte
@@ -626,10 +629,20 @@ def unquote_to_bytes(string):
except KeyError:
append(b'%')
append(item)
- return b''.join(res)
+ return res
_asciire = re.compile('([\x00-\x7f]+)')
+def _generate_unquoted_parts(string, encoding, errors):
+ previous_match_end = 0
+ for ascii_match in _asciire.finditer(string):
+ start, end = ascii_match.span()
+ yield string[previous_match_end:start] # Non-ASCII
+ # The ascii_match[1] group == string[start:end].
+ yield _unquote_impl(ascii_match[1]).decode(encoding, errors)
+ previous_match_end = end
+ yield string[previous_match_end:] # Non-ASCII tail
+
def unquote(string, encoding='utf-8', errors='replace'):
"""Replace %xx escapes by their single-character equivalent. The optional
encoding and errors parameters specify how to decode percent-encoded
@@ -641,21 +654,16 @@ def unquote(string, encoding='utf-8', errors='replace'):
unquote('abc%20def') -> 'abc def'.
"""
if isinstance(string, bytes):
- return unquote_to_bytes(string).decode(encoding, errors)
+ return _unquote_impl(string).decode(encoding, errors)
if '%' not in string:
+ # Is it a string-like object?
string.split
return string
if encoding is None:
encoding = 'utf-8'
if errors is None:
errors = 'replace'
- bits = _asciire.split(string)
- res = [bits[0]]
- append = res.append
- for i in range(1, len(bits), 2):
- append(unquote_to_bytes(bits[i]).decode(encoding, errors))
- append(bits[i + 1])
- return ''.join(res)
+ return ''.join(_generate_unquoted_parts(string, encoding, errors))
def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
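The refactor above routes both ``unquote()`` and ``unquote_to_bytes()`` through ``_unquote_impl()``, accumulating into a single ``bytearray`` and decoding ASCII runs through a generator; this lowers peak memory on large inputs while leaving observable behaviour unchanged::

    from urllib.parse import unquote, unquote_to_bytes

    print(unquote_to_bytes("abc%20def"))      # b'abc def'
    print(unquote("na%C3%AFve caf%C3%A9"))    # 'naïve café'

    # Non-ASCII text passes through untouched; only %xx runs are decoded.
    print(unquote("π %41nd %CF%80"))          # 'π And π'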
diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py
index 7bfc2d1b6fe057..978c98336f2b37 100644
--- a/Lib/venv/__init__.py
+++ b/Lib/venv/__init__.py
@@ -223,7 +223,7 @@ def symlink_or_copy(self, src, dst, relative_symlinks_ok=False):
force_copy = not self.symlinks
if not force_copy:
try:
- if not os.path.islink(dst): # can't link to itself!
+ if not os.path.islink(dst): # can't link to itself!
if relative_symlinks_ok:
assert os.path.dirname(src) == os.path.dirname(dst)
os.symlink(os.path.basename(src), dst)
@@ -418,11 +418,11 @@ def install_scripts(self, context, path):
binpath = context.bin_path
plen = len(path)
for root, dirs, files in os.walk(path):
- if root == path: # at top-level, remove irrelevant dirs
+ if root == path: # at top-level, remove irrelevant dirs
for d in dirs[:]:
if d not in ('common', os.name):
dirs.remove(d)
- continue # ignore files in top level
+ continue # ignore files in top level
for f in files:
if (os.name == 'nt' and f.startswith('python')
and f.endswith(('.exe', '.pdb'))):
@@ -468,83 +468,76 @@ def create(env_dir, system_site_packages=False, clear=False,
prompt=prompt, upgrade_deps=upgrade_deps)
builder.create(env_dir)
+
def main(args=None):
- compatible = True
- if sys.version_info < (3, 3):
- compatible = False
- elif not hasattr(sys, 'base_prefix'):
- compatible = False
- if not compatible:
- raise ValueError('This script is only for use with Python >= 3.3')
+ import argparse
+
+ parser = argparse.ArgumentParser(prog=__name__,
+ description='Creates virtual Python '
+ 'environments in one or '
+ 'more target '
+ 'directories.',
+ epilog='Once an environment has been '
+ 'created, you may wish to '
+ 'activate it, e.g. by '
+ 'sourcing an activate script '
+ 'in its bin directory.')
+ parser.add_argument('dirs', metavar='ENV_DIR', nargs='+',
+ help='A directory to create the environment in.')
+ parser.add_argument('--system-site-packages', default=False,
+ action='store_true', dest='system_site',
+ help='Give the virtual environment access to the '
+ 'system site-packages dir.')
+ if os.name == 'nt':
+ use_symlinks = False
else:
- import argparse
-
- parser = argparse.ArgumentParser(prog=__name__,
- description='Creates virtual Python '
- 'environments in one or '
- 'more target '
- 'directories.',
- epilog='Once an environment has been '
- 'created, you may wish to '
- 'activate it, e.g. by '
- 'sourcing an activate script '
- 'in its bin directory.')
- parser.add_argument('dirs', metavar='ENV_DIR', nargs='+',
- help='A directory to create the environment in.')
- parser.add_argument('--system-site-packages', default=False,
- action='store_true', dest='system_site',
- help='Give the virtual environment access to the '
- 'system site-packages dir.')
- if os.name == 'nt':
- use_symlinks = False
- else:
- use_symlinks = True
- group = parser.add_mutually_exclusive_group()
- group.add_argument('--symlinks', default=use_symlinks,
- action='store_true', dest='symlinks',
- help='Try to use symlinks rather than copies, '
- 'when symlinks are not the default for '
- 'the platform.')
- group.add_argument('--copies', default=not use_symlinks,
- action='store_false', dest='symlinks',
- help='Try to use copies rather than symlinks, '
- 'even when symlinks are the default for '
- 'the platform.')
- parser.add_argument('--clear', default=False, action='store_true',
- dest='clear', help='Delete the contents of the '
- 'environment directory if it '
- 'already exists, before '
- 'environment creation.')
- parser.add_argument('--upgrade', default=False, action='store_true',
- dest='upgrade', help='Upgrade the environment '
- 'directory to use this version '
- 'of Python, assuming Python '
- 'has been upgraded in-place.')
- parser.add_argument('--without-pip', dest='with_pip',
- default=True, action='store_false',
- help='Skips installing or upgrading pip in the '
- 'virtual environment (pip is bootstrapped '
- 'by default)')
- parser.add_argument('--prompt',
- help='Provides an alternative prompt prefix for '
- 'this environment.')
- parser.add_argument('--upgrade-deps', default=False, action='store_true',
- dest='upgrade_deps',
- help='Upgrade core dependencies: {} to the latest '
- 'version in PyPI'.format(
- ' '.join(CORE_VENV_DEPS)))
- options = parser.parse_args(args)
- if options.upgrade and options.clear:
- raise ValueError('you cannot supply --upgrade and --clear together.')
- builder = EnvBuilder(system_site_packages=options.system_site,
- clear=options.clear,
- symlinks=options.symlinks,
- upgrade=options.upgrade,
- with_pip=options.with_pip,
- prompt=options.prompt,
- upgrade_deps=options.upgrade_deps)
- for d in options.dirs:
- builder.create(d)
+ use_symlinks = True
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument('--symlinks', default=use_symlinks,
+ action='store_true', dest='symlinks',
+ help='Try to use symlinks rather than copies, '
+ 'when symlinks are not the default for '
+ 'the platform.')
+ group.add_argument('--copies', default=not use_symlinks,
+ action='store_false', dest='symlinks',
+ help='Try to use copies rather than symlinks, '
+ 'even when symlinks are the default for '
+ 'the platform.')
+ parser.add_argument('--clear', default=False, action='store_true',
+ dest='clear', help='Delete the contents of the '
+ 'environment directory if it '
+ 'already exists, before '
+ 'environment creation.')
+ parser.add_argument('--upgrade', default=False, action='store_true',
+ dest='upgrade', help='Upgrade the environment '
+ 'directory to use this version '
+ 'of Python, assuming Python '
+ 'has been upgraded in-place.')
+ parser.add_argument('--without-pip', dest='with_pip',
+ default=True, action='store_false',
+ help='Skips installing or upgrading pip in the '
+ 'virtual environment (pip is bootstrapped '
+ 'by default)')
+ parser.add_argument('--prompt',
+ help='Provides an alternative prompt prefix for '
+ 'this environment.')
+ parser.add_argument('--upgrade-deps', default=False, action='store_true',
+ dest='upgrade_deps',
+ help=f'Upgrade core dependencies: {" ".join(CORE_VENV_DEPS)} '
+ 'to the latest version in PyPI')
+ options = parser.parse_args(args)
+ if options.upgrade and options.clear:
+ raise ValueError('you cannot supply --upgrade and --clear together.')
+ builder = EnvBuilder(system_site_packages=options.system_site,
+ clear=options.clear,
+ symlinks=options.symlinks,
+ upgrade=options.upgrade,
+ with_pip=options.with_pip,
+ prompt=options.prompt,
+ upgrade_deps=options.upgrade_deps)
+ for d in options.dirs:
+ builder.create(d)
+
if __name__ == '__main__':
rc = 1
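After dropping the long-obsolete version check, ``main()`` is a thin argparse front end over ``EnvBuilder``; programmatic use is unchanged. A sketch (the target directory below is hypothetical)::

    import venv

    # Equivalent to: python -m venv --clear --without-pip /tmp/demo-env
    builder = venv.EnvBuilder(clear=True, with_pip=False)
    builder.create("/tmp/demo-env")

    # Or drive the CLI entry point in-process:
    # venv.main(["--clear", "--without-pip", "/tmp/demo-env"])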
diff --git a/Makefile.pre.in b/Makefile.pre.in
index f6df7a620deaed..dd6c3fbd1c6483 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -976,7 +976,8 @@ Programs/_testembed: Programs/_testembed.o $(LINK_PYTHON_DEPS)
BOOTSTRAP_HEADERS = \
Python/frozen_modules/importlib._bootstrap.h \
- Python/frozen_modules/importlib._bootstrap_external.h
+ Python/frozen_modules/importlib._bootstrap_external.h \
+ Python/frozen_modules/zipimport.h
Programs/_bootstrap_python.o: Programs/_bootstrap_python.c $(BOOTSTRAP_HEADERS) $(PYTHON_HEADERS)
@@ -1623,6 +1624,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_bytesobject.h \
$(srcdir)/Include/internal/pycore_call.h \
$(srcdir)/Include/internal/pycore_ceval.h \
+ $(srcdir)/Include/internal/pycore_ceval_state.h \
$(srcdir)/Include/internal/pycore_code.h \
$(srcdir)/Include/internal/pycore_compile.h \
$(srcdir)/Include/internal/pycore_condvar.h \
@@ -1632,6 +1634,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_descrobject.h \
$(srcdir)/Include/internal/pycore_dtoa.h \
$(srcdir)/Include/internal/pycore_exceptions.h \
+ $(srcdir)/Include/internal/pycore_faulthandler.h \
$(srcdir)/Include/internal/pycore_fileutils.h \
$(srcdir)/Include/internal/pycore_floatobject.h \
$(srcdir)/Include/internal/pycore_format.h \
@@ -1662,6 +1665,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_pymem.h \
$(srcdir)/Include/internal/pycore_pymem_init.h \
$(srcdir)/Include/internal/pycore_pystate.h \
+ $(srcdir)/Include/internal/pycore_pythread.h \
$(srcdir)/Include/internal/pycore_range.h \
$(srcdir)/Include/internal/pycore_runtime.h \
$(srcdir)/Include/internal/pycore_runtime_init_generated.h \
@@ -1672,8 +1676,10 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_structseq.h \
$(srcdir)/Include/internal/pycore_symtable.h \
$(srcdir)/Include/internal/pycore_sysmodule.h \
+ $(srcdir)/Include/internal/pycore_time.h \
$(srcdir)/Include/internal/pycore_token.h \
$(srcdir)/Include/internal/pycore_traceback.h \
+ $(srcdir)/Include/internal/pycore_tracemalloc.h \
$(srcdir)/Include/internal/pycore_tuple.h \
$(srcdir)/Include/internal/pycore_typeobject.h \
$(srcdir)/Include/internal/pycore_ucnhash.h \
diff --git a/Misc/ACKS b/Misc/ACKS
index 5d97067b85d3d4..d50cb3c2d1ee4f 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -1320,6 +1320,7 @@ Michele Orrù
Tomáš Orsava
Oleg Oshmyan
Denis Osipov
+Itamar Ostricher
Denis S. Otkidach
Peter Otten
Michael Otteneder
@@ -1627,6 +1628,7 @@ Silas Sewell
Ian Seyer
Dmitry Shachnev
Anish Shah
+Jaineel Shah
Daniel Shahaf
Hui Shang
Geoff Shannon
diff --git a/Misc/NEWS.d/3.12.0a3.rst b/Misc/NEWS.d/3.12.0a3.rst
new file mode 100644
index 00000000000000..3d1e43350d136e
--- /dev/null
+++ b/Misc/NEWS.d/3.12.0a3.rst
@@ -0,0 +1,836 @@
+.. date: 2022-12-05-01-39-10
+.. gh-issue: 100001
+.. nonce: uD05Fc
+.. release date: 2022-12-06
+.. section: Security
+
+``python -m http.server`` no longer allows terminal control characters sent
+within a garbage request to be printed to the stderr server log.
+
+This is done by changing the :mod:`http.server`
+:class:`BaseHTTPRequestHandler` ``.log_message`` method to replace control
+characters with a ``\xHH`` hex escape before printing.
+
+..
+
+.. date: 2022-11-11-12-50-28
+.. gh-issue: 87604
+.. nonce: OtwH5L
+.. section: Security
+
+Avoid publishing list of active per-interpreter audit hooks via the
+:mod:`gc` module
+
+..
+
+.. date: 2022-11-30-11-09-40
+.. gh-issue: 99891
+.. nonce: 9VomwB
+.. section: Core and Builtins
+
+Fix a bug in the tokenizer that could cause infinite recursion when showing
+syntax warnings that happen in the first line of the source. Patch by Pablo
+Galindo.
+
+..
+
+.. date: 2022-11-27-13-50-13
+.. gh-issue: 91054
+.. nonce: oox_kW
+.. section: Core and Builtins
+
+Add :c:func:`PyCode_AddWatcher` and :c:func:`PyCode_ClearWatcher` APIs to
+register callbacks to receive notification on creation and destruction of
+code objects.
+
+..
+
+.. date: 2022-11-26-04-00-41
+.. gh-issue: 99729
+.. nonce: A3ovwQ
+.. section: Core and Builtins
+
+Fix an issue that could cause frames to be visible to Python code as they
+are being torn down, possibly leading to memory corruption or hard crashes
+of the interpreter.
+
+..
+
+.. date: 2022-11-23-18-16-18
+.. gh-issue: 99708
+.. nonce: 7MuaiR
+.. section: Core and Builtins
+
+Fix bug where compiler crashes on an if expression with an empty body block.
+
+..
+
+.. date: 2022-11-21-11-27-14
+.. gh-issue: 99578
+.. nonce: DcKoBJ
+.. section: Core and Builtins
+
+Fix a reference bug in :func:`_imp.create_builtin()` after the creation of
+the first sub-interpreter for modules ``builtins`` and ``sys``. Patch by
+Victor Stinner.
+
+..
+
+.. date: 2022-11-19-22-27-52
+.. gh-issue: 99581
+.. nonce: yKYPbf
+.. section: Core and Builtins
+
+Fixed a bug that was causing a buffer overflow if the tokenizer copies a
+line missing the newline character from a file that is as long as the
+available tokenizer buffer. Patch by Pablo Galindo.
+
+..
+
+.. date: 2022-11-18-11-24-25
+.. gh-issue: 99553
+.. nonce: F64h-n
+.. section: Core and Builtins
+
+Fix bug where an :exc:`ExceptionGroup` subclass can wrap a
+:exc:`BaseException`.
+
+..
+
+.. date: 2022-11-16-21-35-30
+.. gh-issue: 99547
+.. nonce: p_c_bp
+.. section: Core and Builtins
+
+Add :func:`os.path.isjunction` to check whether a path is a junction, and a
+corresponding :meth:`pathlib.Path.is_junction` method.
+
+..
+
+.. date: 2022-11-12-01-39-57
+.. gh-issue: 99370
+.. nonce: _cu32j
+.. section: Core and Builtins
+
+Fix zip path for venv created from a non-installed python on POSIX
+platforms.
+
+..
+
+.. date: 2022-11-11-14-04-01
+.. gh-issue: 99377
+.. nonce: -CJvWn
+.. section: Core and Builtins
+
+Add audit events for thread creation and clear operations.
+
+..
+
+.. date: 2022-11-10-17-09-16
+.. gh-issue: 98686
+.. nonce: bmAKwr
+.. section: Core and Builtins
+
+Remove the ``BINARY_OP_GENERIC`` and ``COMPARE_OP_GENERIC``
+"specializations".
+
+..
+
+.. date: 2022-11-10-16-53-40
+.. gh-issue: 99298
+.. nonce: HqRJES
+.. section: Core and Builtins
+
+Remove the remaining error paths for attribute specializations, and refuse
+to specialize attribute accesses on types that haven't had
+:c:func:`PyType_Ready` called on them yet.
+
+..
+
+.. date: 2022-11-05-22-26-35
+.. gh-issue: 99127
+.. nonce: Btk7ih
+.. section: Core and Builtins
+
+Restrict some features of the :mod:`syslog` module to the main interpreter.
+Patch by Dong-hee Na.
+
+..
+
+.. date: 2022-10-05-11-44-52
+.. gh-issue: 91053
+.. nonce: f5Bo3p
+.. section: Core and Builtins
+
+Optimizing interpreters and JIT compilers may need to invalidate internal
+metadata when functions are modified. This change adds the ability to
+provide a callback that will be invoked each time a function is created,
+modified, or destroyed.
+
+..
+
+.. date: 2022-09-17-17-08-01
+.. gh-issue: 90994
+.. nonce: f0H2Yd
+.. section: Core and Builtins
+
+Improve error messages when there's a syntax error with call arguments.
+The following three cases are covered:
+
+- No value is assigned to a named argument, e.g. ``foo(a=)``.
+- A value is assigned to a star argument, e.g. ``foo(*args=[0])``.
+- A value is assigned to a double-star keyword argument, e.g.
+  ``foo(**kwarg={'a': 0})``.
+
+..
+
+.. bpo: 45026
+.. date: 2021-08-29-15-55-19
+.. nonce: z7nTA3
+.. section: Core and Builtins
+
+Optimize the :class:`range` object iterator: it is now smaller, iterates
+over ranges containing large numbers faster, and produces smaller pickles
+that unpickle faster.
+
+..
+
+.. bpo: 31718
+.. date: 2020-02-23-23-48-15
+.. nonce: sXko5e
+.. section: Core and Builtins
+
+Raise :exc:`ValueError` instead of :exc:`SystemError` when methods of
+uninitialized :class:`io.IncrementalNewlineDecoder` objects are called.
+Patch by Oren Milman.
+
+..
+
+.. bpo: 38031
+.. date: 2019-09-04-19-09-49
+.. nonce: Yq4L72
+.. section: Core and Builtins
+
+Fix a possible assertion failure in :class:`io.FileIO` when the opener
+returns an invalid file descriptor.
+
+..
+
+.. date: 2022-12-05-13-40-15
+.. gh-issue: 100001
+.. nonce: 78ReYp
+.. section: Library
+
+Also escape the backslash character (``\``) in the :mod:`http.server`
+``BaseHTTPRequestHandler.log_message`` output, so that it is possible to
+parse the line and reconstruct the original data. Without this, a ``\xHH``
+sequence is ambiguous: it could be a hex replacement inserted by the server
+or a literal ``\x`` that came through in the original request line.
+
+..
+
+.. date: 2022-12-03-05-58-48
+.. gh-issue: 99957
+.. nonce: jLYYgN
+.. section: Library
+
+Add ``frozen_default`` parameter to :func:`typing.dataclass_transform`.
+
+..
+
+.. date: 2022-11-22-19-31-26
+.. gh-issue: 79033
+.. nonce: MW6kHq
+.. section: Library
+
+Fix :func:`asyncio.Server.wait_closed` to actually do what the docs promise
+-- wait for all existing connections to complete, after closing the server.
+
+..
+
+.. date: 2022-11-21-17-56-18
+.. gh-issue: 51524
+.. nonce: nTykx8
+.. section: Library
+
+Fix a bug when calling ``trace.CoverageResults`` with a valid *infile*.
+
+..
+
+.. date: 2022-11-21-13-49-03
+.. gh-issue: 99645
+.. nonce: 9w1QKq
+.. section: Library
+
+Fix a bug in handling class cleanups in :class:`unittest.TestCase`. Now
+``addClassCleanup()`` uses separate lists for different ``TestCase``
+subclasses, and ``doClassCleanups()`` only cleans up the particular class.
+
+..
+
+.. date: 2022-11-21-10-45-54
+.. gh-issue: 99508
+.. nonce: QqVbby
+.. section: Library
+
+Fix ``TypeError`` in ``Lib/importlib/_bootstrap_external.py`` while calling
+``_imp.source_hash()``.
+
+..
+
+.. date: 2022-11-17-10-56-47
+.. gh-issue: 66285
+.. nonce: KvjlaB
+.. section: Library
+
+Fix :mod:`asyncio` to not share event loop and signal wakeupfd in forked
+processes. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-11-15-10-55-24
+.. gh-issue: 97001
+.. nonce: KeQuVF
+.. section: Library
+
+Release the GIL when calling termios APIs to avoid blocking threads.
+
+..
+
+.. date: 2022-11-15-04-08-25
+.. gh-issue: 92647
+.. nonce: cZcjnJ
+.. section: Library
+
+Use the final status of an enum to determine whether the functional API
+performs a lookup or creates a new enum.
+
+..
+
+.. date: 2022-11-14-08-21-56
+.. gh-issue: 99388
+.. nonce: UWSlwp
+.. section: Library
+
+Add *loop_factory* parameter to :func:`asyncio.run` to allow specifying a
+custom event loop factory. Patch by Kumar Aditya.
+
+..
+
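A hedged example of the new parameter: any zero-argument callable returning an event loop can be supplied, for instance to force a selector-based loop (Python 3.12+)::

    import asyncio

    async def main():
        return 42

    print(asyncio.run(main(), loop_factory=asyncio.SelectorEventLoop))  # 42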
+.. date: 2022-11-13-02-06-56
+.. gh-issue: 99341
+.. nonce: 8-OlwB
+.. section: Library
+
+Fix :func:`ast.increment_lineno` to also cover :class:`ast.TypeIgnore` when
+changing line numbers.
+
+..
+
+.. date: 2022-11-12-12-15-30
+.. gh-issue: 99382
+.. nonce: dKg_rW
+.. section: Library
+
+Check the number of arguments in substitution in user generics containing a
+:class:`~typing.TypeVarTuple` and one or more :class:`~typing.TypeVar`.
+
+..
+
+.. date: 2022-11-12-12-10-23
+.. gh-issue: 99379
+.. nonce: bcGhxF
+.. section: Library
+
+Fix substitution of :class:`~typing.ParamSpec` followed by
+:class:`~typing.TypeVarTuple` in generic aliases.
+
+..
+
+.. date: 2022-11-12-12-08-34
+.. gh-issue: 99344
+.. nonce: 7M_u8G
+.. section: Library
+
+Fix substitution of :class:`~typing.TypeVarTuple` and
+:class:`~typing.ParamSpec` together in user generics.
+
+..
+
+.. date: 2022-11-09-12-36-12
+.. gh-issue: 99284
+.. nonce: 9p4J2l
+.. section: Library
+
+Remove ``_use_broken_old_ctypes_structure_semantics_`` old untested and
+undocumented hack from :mod:`ctypes`.
+
+..
+
+.. date: 2022-11-09-03-34-29
+.. gh-issue: 99201
+.. nonce: lDJ7xI
+.. section: Library
+
+Fix :exc:`IndexError` when initializing the config variables on Windows if
+``HAVE_DYNAMIC_LOADING`` is not set.
+
+..
+
+.. date: 2022-11-08-15-54-43
+.. gh-issue: 99240
+.. nonce: MhYwcz
+.. section: Library
+
+Fix double-free bug in Argument Clinic ``str_converter`` by extracting
+memory clean up to a new ``post_parsing`` section.
+
+..
+
+.. date: 2022-11-08-11-18-51
+.. gh-issue: 64490
+.. nonce: VcBgrN
+.. section: Library
+
+Fix refcount error when arguments are packed to tuple in Argument Clinic.
+
+..
+
+.. date: 2022-11-02-23-47-07
+.. gh-issue: 99029
+.. nonce: 7uCiIB
+.. section: Library
+
+:meth:`pathlib.PurePath.relative_to()` now treats naked Windows drive paths
+as relative. This brings its behaviour in line with other parts of pathlib.
+
+..
+
+.. date: 2022-10-24-11-01-05
+.. gh-issue: 98253
+.. nonce: HVd5v4
+.. section: Library
+
+The implementation of the typing module is now more resilient to reference
+leaks in binary extension modules.
+
+Previously, a reference leak in a typed C API-based extension module could
+leak internals of the typing module, which could in turn introduce leaks in
+essentially any other package with typed function signatures. Although the
+typing package is not the original source of the problem, such non-local
+dependences exacerbate debugging of large-scale projects, and the
+implementation was therefore changed to reduce harm by providing better
+isolation.
+
+..
+
+.. date: 2022-10-19-18-31-53
+.. gh-issue: 98458
+.. nonce: vwyq7O
+.. section: Library
+
+Fix infinite loop in unittest when a self-referencing chained exception is
+raised
+
+..
+
+.. date: 2022-10-19-13-37-23
+.. gh-issue: 93453
+.. nonce: wTB_sH
+.. section: Library
+
+:func:`asyncio.get_event_loop` and many other :mod:`asyncio` functions like
+:func:`asyncio.ensure_future`, :func:`asyncio.shield` or
+:func:`asyncio.gather`, and also the
+:meth:`~asyncio.BaseDefaultEventLoopPolicy.get_event_loop` method of
+:class:`asyncio.BaseDefaultEventLoopPolicy` now raise a :exc:`RuntimeError`
+if called when there is no running event loop and the current event loop was
+not set. Previously they implicitly created and set a new current event
+loop. :exc:`DeprecationWarning` is no longer emitted if there is no running
+event loop but the current event loop was set.
+
+..
+
+.. date: 2022-10-16-18-52-00
+.. gh-issue: 97966
+.. nonce: humlhz
+.. section: Library
+
+On ``uname_result``, restored expectation that ``_fields`` and ``_asdict``
+would include all six properties including ``processor``.
+
+..
+
+.. date: 2022-10-13-22-13-54
+.. gh-issue: 98248
+.. nonce: lwyygy
+.. section: Library
+
+Provide informative error messages in :func:`struct.pack` when its integral
+arguments are not in range.
+
+..
+
+.. date: 2022-10-08-19-20-33
+.. gh-issue: 98108
+.. nonce: WUObqM
+.. section: Library
+
+``zipfile.Path`` is now pickleable if its initialization parameters were
+pickleable (e.g. for file system paths).
+
+..
+
+.. date: 2022-10-08-15-41-00
+.. gh-issue: 98098
+.. nonce: DugpWi
+.. section: Library
+
+Created packages from zipfile and test_zipfile modules, separating
+``zipfile.Path`` functionality.
+
+..
+
+.. date: 2022-10-02-12-38-22
+.. gh-issue: 82836
+.. nonce: OvYLmC
+.. section: Library
+
+Fix :attr:`~ipaddress.IPv4Address.is_private` properties in the
+:mod:`ipaddress` module. Previously non-private networks (0.0.0.0/0) would
+return True from this method; now they correctly return False.
+
+..
+
+.. date: 2022-09-14-21-56-15
+.. gh-issue: 96828
+.. nonce: ZoOY5G
+.. section: Library
+
+Add an :data:`~ssl.OP_ENABLE_KTLS` option for enabling the use of the kernel
+TLS (kTLS). Patch by Illia Volochii.
+
+..
+
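A hedged usage sketch: the flag is only exposed when CPython is linked against an OpenSSL build that supports kernel TLS, so it is worth guarding with ``hasattr``::

    import ssl

    ctx = ssl.create_default_context()
    if hasattr(ssl, "OP_ENABLE_KTLS"):
        ctx.options |= ssl.OP_ENABLE_KTLS   # offload TLS I/O to the kernel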
+.. date: 2022-08-06-12-18-07
+.. gh-issue: 88863
+.. nonce: NnqsuJ
+.. section: Library
+
+To avoid apparent memory leaks when :func:`asyncio.open_connection` raises,
+break reference cycles generated by local exception and future instances
+(which hold the exception instance as an attribute). Patch by Dong Uk, Kang.
+
+..
+
+.. date: 2022-04-23-03-46-37
+.. gh-issue: 91078
+.. nonce: 87-hkp
+.. section: Library
+
+:meth:`TarFile.next` now returns ``None`` when called on an empty tarfile.
+
+..
+
+.. bpo: 47220
+.. date: 2022-04-04-22-54-11
+.. nonce: L9jYu4
+.. section: Library
+
+Document the optional *callback* parameter of :class:`WeakMethod`. Patch by
+Géry Ogam.
+
+..
+
+.. bpo: 44817
+.. date: 2021-08-03-05-31-00
+.. nonce: wOW_Qn
+.. section: Library
+
+Ignore WinError 53 (ERROR_BAD_NETPATH), 65 (ERROR_NETWORK_ACCESS_DENIED) and
+161 (ERROR_BAD_PATHNAME) when using ntpath.realpath().
+
+..
+
+.. bpo: 41260
+.. date: 2020-08-02-23-46-22
+.. nonce: Q2BNzY
+.. section: Library
+
+Rename the *fmt* parameter of the pure Python implementation of
+:meth:`datetime.date.strftime` to *format*.
+
+..
+
+.. bpo: 15999
+.. date: 2019-08-30-10-48-53
+.. nonce: QqsRRi
+.. section: Library
+
+All built-in functions now accept arguments of any type instead of just
+``bool`` and ``int`` for boolean parameters.
+
+..
+
+.. date: 2022-12-02-17-08-08
+.. gh-issue: 99931
+.. nonce: wC46hE
+.. section: Documentation
+
+Use `sphinxext-opengraph <https://github.com/wpilibsuite/sphinxext-opengraph>`__
+to generate `OpenGraph metadata <https://ogp.me/>`__.
+
+..
+
+.. date: 2022-11-26-21-43-05
+.. gh-issue: 89682
+.. nonce: DhKoTM
+.. section: Documentation
+
+Reworded docstring of the default ``__contains__`` to clarify that it
+returns a :class:`bool`.
+
+..
+
+.. date: 2022-11-26-15-51-23
+.. gh-issue: 88330
+.. nonce: B_wFq8
+.. section: Documentation
+
+Improved the description of what a resource is in importlib.resources docs.
+
+..
+
+.. date: 2022-11-16-12-52-23
+.. gh-issue: 92892
+.. nonce: TS-P0j
+.. section: Documentation
+
+Document that calling variadic functions with ctypes requires special care
+on macOS/arm64 (and possibly other platforms).
+
+..
+
+.. bpo: 41825
+.. date: 2020-09-22-12-32-16
+.. nonce: npcaCb
+.. section: Documentation
+
+Restructured the documentation for the :func:`os.wait* <os.wait>` family of
+functions, and improved the docs for :func:`os.waitid` with more explanation
+of the possible argument constants.
+
+..
+
+.. date: 2022-12-05-16-12-56
+.. gh-issue: 99892
+.. nonce: sz_eW8
+.. section: Tests
+
+Skip test_normalization() of test_unicodedata if it fails to download
+NormalizationTest.txt file from pythontest.net. Patch by Victor Stinner.
+
+..
+
+.. date: 2022-12-01-18-55-18
+.. gh-issue: 99934
+.. nonce: Ox3Fqf
+.. section: Tests
+
+Correct ``test_marshal`` on 32-bit x86: ``test_deterministic_sets`` was failing.
+
+..
+
+.. date: 2022-11-23-18-32-16
+.. gh-issue: 99741
+.. nonce: q4R7NH
+.. section: Tests
+
+We've implemented multi-phase init (PEP 489/630/687) for the internal (for
+testing) _xxsubinterpreters module.
+
+..
+
+.. date: 2022-11-21-19-21-30
+.. gh-issue: 99659
+.. nonce: 4gP0nm
+.. section: Tests
+
+Optional big memory tests in ``test_sqlite3`` now catch the correct
+:exc:`sqlite3.DataError` exception type in case of too large strings and/or
+blobs passed.
+
+..
+
+.. date: 2022-11-19-13-34-28
+.. gh-issue: 99593
+.. nonce: 8ZfCkj
+.. section: Tests
+
+Cover the Unicode C API with tests.
+
+..
+
+.. date: 2022-08-22-15-49-14
+.. gh-issue: 96002
+.. nonce: 4UE9UE
+.. section: Tests
+
+Add functional test for Argument Clinic.
+
+..
+
+.. date: 2022-11-24-02-58-10
+.. gh-issue: 99086
+.. nonce: DV_4Br
+.. section: Build
+
+Fix ``-Wimplicit-int``, ``-Wstrict-prototypes``, and
+``-Wimplicit-function-declaration`` compiler warnings in
+:program:`configure` checks.
+
+..
+
+.. date: 2022-11-15-08-40-22
+.. gh-issue: 99337
+.. nonce: 5LoQDE
+.. section: Build
+
+Fix a compilation issue with GCC 12 on macOS.
+
+..
+
+.. date: 2022-11-09-14-42-48
+.. gh-issue: 99289
+.. nonce: X7wFE1
+.. section: Build
+
+Add a ``COMPILEALL_OPTS`` variable in Makefile to override :mod:`compileall`
+options (default: ``-j0``) in ``make install``. Also merged the
+``compileall`` commands into a single command building .pyc files for all
+optimization levels (0, 1, 2) at once. Patch by Victor Stinner.
+
+..
+
+.. date: 2022-11-03-08-10-49
+.. gh-issue: 98872
+.. nonce: gdsR8X
+.. section: Build
+
+Fix a possible fd leak in ``Programs/_freeze_module.c`` introduced in Python
+3.11.
+
+..
+
+.. date: 2022-10-16-12-49-24
+.. gh-issue: 88226
+.. nonce: BsnQ4k
+.. section: Build
+
+Always define ``TARGET_*`` labels in ``Python/ceval.c``, even if
+``USE_COMPUTED_GOTOS`` is disabled. This allows breakpoints to be set at
+those labels in (for instance) ``gdb``.
+
+..
+
+.. date: 2022-11-23-17-17-16
+.. gh-issue: 99345
+.. nonce: jOa3-f
+.. section: Windows
+
+Use faster initialization functions to detect install location for Windows
+Store package
+
+..
+
+.. date: 2022-11-21-19-50-18
+.. gh-issue: 98629
+.. nonce: tMmB_B
+.. section: Windows
+
+Fix initialization of :data:`sys.version` and ``sys._git`` on Windows
+
+..
+
+.. date: 2022-11-16-19-03-21
+.. gh-issue: 99442
+.. nonce: 6Dgk3Q
+.. section: Windows
+
+Fix handling in :ref:`launcher` when ``argv[0]`` does not include a file
+extension.
+
+..
+
+.. bpo: 40882
+.. date: 2020-06-06-15-10-37
+.. nonce: UvNbdj
+.. section: Windows
+
+Fix a memory leak in :class:`multiprocessing.shared_memory.SharedMemory` on
+Windows.
+
+..
+
+.. date: 2022-11-25-09-23-20
+.. gh-issue: 87235
+.. nonce: SifjCD
+.. section: macOS
+
+On macOS ``python3 /dev/fd/9 9<...``
diff --git a/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst b/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst
deleted file mode 100644
index 54e421d19d9da3..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst
+++ /dev/null
@@ -1 +0,0 @@
-Document that calling variadic functions with ctypes requires special care on macOS/arm64 (and possibly other platforms).
diff --git a/Misc/NEWS.d/next/Documentation/2022-11-26-15-51-23.gh-issue-88330.B_wFq8.rst b/Misc/NEWS.d/next/Documentation/2022-11-26-15-51-23.gh-issue-88330.B_wFq8.rst
deleted file mode 100644
index 0f242eecc31258..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2022-11-26-15-51-23.gh-issue-88330.B_wFq8.rst
+++ /dev/null
@@ -1 +0,0 @@
-Improved the description of what a resource is in importlib.resources docs.
diff --git a/Misc/NEWS.d/next/Documentation/2022-11-26-21-43-05.gh-issue-89682.DhKoTM.rst b/Misc/NEWS.d/next/Documentation/2022-11-26-21-43-05.gh-issue-89682.DhKoTM.rst
deleted file mode 100644
index 46be065b653952..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2022-11-26-21-43-05.gh-issue-89682.DhKoTM.rst
+++ /dev/null
@@ -1 +0,0 @@
-Reworded docstring of the default ``__contains__`` to clarify that it returns a :class:`bool`.
diff --git a/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst b/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst
deleted file mode 100644
index ae2fdd9b84a00e..00000000000000
--- a/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Rename the *fmt* parameter of the pure Python implementation of
-:meth:`datetime.date.strftime` to *format*.
diff --git a/Misc/NEWS.d/next/Library/2022-04-04-22-54-11.bpo-47220.L9jYu4.rst b/Misc/NEWS.d/next/Library/2022-04-04-22-54-11.bpo-47220.L9jYu4.rst
deleted file mode 100644
index 6e2af088640b55..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-04-04-22-54-11.bpo-47220.L9jYu4.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Document the optional *callback* parameter of :class:`WeakMethod`. Patch by
-Géry Ogam.
diff --git a/Misc/NEWS.d/next/Library/2022-04-23-03-46-37.gh-issue-91078.87-hkp.rst b/Misc/NEWS.d/next/Library/2022-04-23-03-46-37.gh-issue-91078.87-hkp.rst
deleted file mode 100644
index e05d5e2a13146c..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-04-23-03-46-37.gh-issue-91078.87-hkp.rst
+++ /dev/null
@@ -1 +0,0 @@
-:meth:`TarFile.next` now returns ``None`` when called on an empty tarfile.
diff --git a/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst b/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst
new file mode 100644
index 00000000000000..d585535ee38d20
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst
@@ -0,0 +1 @@
+Fix reStructuredText syntax errors in docstrings in the :mod:`enum` module.
diff --git a/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst b/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst
deleted file mode 100644
index 23f8cb01cf0ac8..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-To avoid apparent memory leaks when :func:`asyncio.open_connection` raises,
-break reference cycles generated by local exception and future instances
-(which has exception instance as its member var). Patch by Dong Uk, Kang.
diff --git a/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst b/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst
deleted file mode 100644
index d8a448851f4779..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add an :data:`~ssl.OP_ENABLE_KTLS` option for enabling the use of the kernel
-TLS (kTLS). Patch by Illia Volochii.
diff --git a/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst b/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst
new file mode 100644
index 00000000000000..ad01f5e16b16a9
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst
@@ -0,0 +1,2 @@
+Reduced the memory usage of :func:`urllib.parse.unquote` and
+:func:`urllib.parse.unquote_to_bytes` on large values.
diff --git a/Misc/NEWS.d/next/Library/2022-10-02-12-38-22.gh-issue-82836.OvYLmC.rst b/Misc/NEWS.d/next/Library/2022-10-02-12-38-22.gh-issue-82836.OvYLmC.rst
deleted file mode 100644
index dcbea66d66bf7c..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-02-12-38-22.gh-issue-82836.OvYLmC.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix :attr:`~ipaddress.IPv4Address.is_private` properties in the :mod:`ipaddress` module. Previously non-private networks (0.0.0.0/0) would return True from this method; now they correctly return False.
diff --git a/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst b/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst
new file mode 100644
index 00000000000000..7768ed0817e8fa
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst
@@ -0,0 +1,7 @@
+Add missing TCP socket options from Linux: ``TCP_MD5SIG``,
+``TCP_THIN_LINEAR_TIMEOUTS``, ``TCP_THIN_DUPACK``, ``TCP_REPAIR``,
+``TCP_REPAIR_QUEUE``, ``TCP_QUEUE_SEQ``, ``TCP_REPAIR_OPTIONS``,
+``TCP_TIMESTAMP``, ``TCP_CC_INFO``, ``TCP_SAVE_SYN``, ``TCP_SAVED_SYN``,
+``TCP_REPAIR_WINDOW``, ``TCP_FASTOPEN_CONNECT``, ``TCP_ULP``,
+``TCP_MD5SIG_EXT``, ``TCP_FASTOPEN_KEY``, ``TCP_FASTOPEN_NO_COOKIE``,
+``TCP_ZEROCOPY_RECEIVE``, ``TCP_INQ``, ``TCP_TX_DELAY``.
diff --git a/Misc/NEWS.d/next/Library/2022-10-08-15-41-00.gh-issue-98098.DugpWi.rst b/Misc/NEWS.d/next/Library/2022-10-08-15-41-00.gh-issue-98098.DugpWi.rst
deleted file mode 100644
index 202275e16ea081..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-08-15-41-00.gh-issue-98098.DugpWi.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Created packages from zipfile and test_zipfile modules, separating
-``zipfile.Path`` functionality.
diff --git a/Misc/NEWS.d/next/Library/2022-10-08-19-20-33.gh-issue-98108.WUObqM.rst b/Misc/NEWS.d/next/Library/2022-10-08-19-20-33.gh-issue-98108.WUObqM.rst
deleted file mode 100644
index 7e962580dda228..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-08-19-20-33.gh-issue-98108.WUObqM.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-``zipfile.Path`` is now pickleable if its initialization parameters were
-pickleable (e.g. for file system paths).
diff --git a/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst b/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst
deleted file mode 100644
index b725465ae4f0ef..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-On ``uname_result``, restored expectation that ``_fields`` and ``_asdict``
-would include all six properties including ``processor``.
diff --git a/Misc/NEWS.d/next/Library/2022-10-24-11-01-05.gh-issue-98253.HVd5v4.rst b/Misc/NEWS.d/next/Library/2022-10-24-11-01-05.gh-issue-98253.HVd5v4.rst
deleted file mode 100644
index 00df0070f3b9c1..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-24-11-01-05.gh-issue-98253.HVd5v4.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-The implementation of the typing module is now more resilient to reference
-leaks in binary extension modules.
-
-Previously, a reference leak in a typed C API-based extension module could leak
-internals of the typing module, which could in turn introduce leaks in
-essentially any other package with typed function signatures. Although the
-typing package is not the original source of the problem, such non-local
-dependences exacerbate debugging of large-scale projects, and the
-implementation was therefore changed to reduce harm by providing better
-isolation.
diff --git a/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst b/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst
deleted file mode 100644
index 0bfba5e1e32662..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-:meth:`pathlib.PurePath.relative_to()` now treats naked Windows drive paths
-as relative. This brings its behaviour in line with other parts of pathlib.
diff --git a/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst b/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst
deleted file mode 100644
index f98c181cc9c54b..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix refcount error when arguments are packed to tuple in Argument Clinic.
diff --git a/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst b/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst
deleted file mode 100644
index 0a4db052755f87..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix double-free bug in Argument Clinic ``str_converter`` by
-extracting memory clean up to a new ``post_parsing`` section.
diff --git a/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst b/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst
deleted file mode 100644
index 6d03574fdaf5bf..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix :exc:`IndexError` when initializing the config variables on Windows if
-``HAVE_DYNAMIC_LOADING`` is not set.
diff --git a/Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst b/Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst
deleted file mode 100644
index 48576bd457aa0d..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Remove ``_use_broken_old_ctypes_structure_semantics_``
-old untested and undocumented hack from :mod:`ctypes`.
diff --git a/Misc/NEWS.d/next/Library/2022-11-12-12-08-34.gh-issue-99344.7M_u8G.rst b/Misc/NEWS.d/next/Library/2022-11-12-12-08-34.gh-issue-99344.7M_u8G.rst
deleted file mode 100644
index 412c8c793435af..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-12-12-08-34.gh-issue-99344.7M_u8G.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix substitution of :class:`~typing.TypeVarTuple` and
-:class:`~typing.ParamSpec` together in user generics.
diff --git a/Misc/NEWS.d/next/Library/2022-11-12-12-10-23.gh-issue-99379.bcGhxF.rst b/Misc/NEWS.d/next/Library/2022-11-12-12-10-23.gh-issue-99379.bcGhxF.rst
deleted file mode 100644
index 1950680b1df86c..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-12-12-10-23.gh-issue-99379.bcGhxF.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix substitution of :class:`~typing.ParamSpec` followed by
-:class:`~typing.TypeVarTuple` in generic aliases.
diff --git a/Misc/NEWS.d/next/Library/2022-11-12-12-15-30.gh-issue-99382.dKg_rW.rst b/Misc/NEWS.d/next/Library/2022-11-12-12-15-30.gh-issue-99382.dKg_rW.rst
deleted file mode 100644
index f153f2fceac844..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-12-12-15-30.gh-issue-99382.dKg_rW.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Check the number of arguments in substitution in user generics containing a
-:class:`~typing.TypeVarTuple` and one or more :class:`~typing.TypeVar`.
diff --git a/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst b/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst
deleted file mode 100644
index 451561c579daff..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix :func:`ast.increment_lineno` to also cover :class:`ast.TypeIgnore` when
-changing line numbers.
diff --git a/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst b/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst
deleted file mode 100644
index f35799d454573e..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add *loop_factory* parameter to :func:`asyncio.run` to allow specifying a custom event loop factory.
-Patch by Kumar Aditya.
diff --git a/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst b/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst
deleted file mode 100644
index c6e2a0ca25ff2a..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst
+++ /dev/null
@@ -1 +0,0 @@
-Use final status of an enum to determine lookup or creation branch of functional API.
diff --git a/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst b/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst
deleted file mode 100644
index 014161cf7b1d44..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst
+++ /dev/null
@@ -1 +0,0 @@
-Release the GIL when calling termios APIs to avoid blocking threads.
diff --git a/Misc/NEWS.d/next/Library/2022-11-17-10-56-47.gh-issue-66285.KvjlaB.rst b/Misc/NEWS.d/next/Library/2022-11-17-10-56-47.gh-issue-66285.KvjlaB.rst
deleted file mode 100644
index ebd82173882726..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-17-10-56-47.gh-issue-66285.KvjlaB.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix :mod:`asyncio` to not share event loop and signal wakeupfd in forked processes. Patch by Kumar Aditya.
diff --git a/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst b/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst
deleted file mode 100644
index 82720d17bcafd3..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix ``TypeError`` in ``Lib/importlib/_bootstrap_external.py`` while calling
-``_imp.source_hash()``.
diff --git a/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst b/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst
deleted file mode 100644
index f6ee449891d9f6..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Fix a bug in handling class cleanups in :class:`unittest.TestCase`. Now
-``addClassCleanup()`` uses separate lists for different ``TestCase``
-subclasses, and ``doClassCleanups()`` only cleans up the particular class.
diff --git a/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst b/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst
new file mode 100644
index 00000000000000..629d9aefb2d869
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst
@@ -0,0 +1 @@
+Fix :func:`inspect.getsource` handling of decorator calls with nested parentheses.
diff --git a/Misc/NEWS.d/next/Library/2022-11-21-17-56-18.gh-issue-51524.nTykx8.rst b/Misc/NEWS.d/next/Library/2022-11-21-17-56-18.gh-issue-51524.nTykx8.rst
deleted file mode 100644
index 63fe7b8a3a3254..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-21-17-56-18.gh-issue-51524.nTykx8.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix bug when calling trace.CoverageResults with valid infile.
diff --git a/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst b/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst
deleted file mode 100644
index 4b12fd9c8d798f..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix :func:`asyncio.Server.wait_closed` to actually do what the docs promise -- wait for all existing connections to complete, after closing the server.
diff --git a/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst b/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst
new file mode 100644
index 00000000000000..ed4754e49bd2cf
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst
@@ -0,0 +1,5 @@
+Add :ref:`enum-dataclass-support` to the
+:class:`~enum.Enum` :meth:`~enum.Enum.__repr__`.
+When inheriting from a :class:`~dataclasses.dataclass`,
+only show the field names in the value section of the member :func:`repr`,
+and not the dataclass' class name.
diff --git a/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst b/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst
new file mode 100644
index 00000000000000..b1c170dff3eabc
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst
@@ -0,0 +1,2 @@
+Update :exc:`~urllib.error.HTTPError` to be initialized properly, even if
+the ``fp`` is ``None``. Patch by Dong-hee Na.
diff --git a/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst b/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst
new file mode 100644
index 00000000000000..256f2bcd39f81d
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst
@@ -0,0 +1 @@
+Fix ``tuple`` subclasses being cast to ``tuple`` when used as enum values.
diff --git a/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst b/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst
new file mode 100644
index 00000000000000..7f290605934d76
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst
@@ -0,0 +1,2 @@
+Fix inconsistent handling of the case sensitivity of the *extrasaction*
+argument in :class:`csv.DictWriter`.
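
A hedged sketch of the intended behaviour, assuming the fix normalizes the argument so that ``"RAISE"`` acts like ``"raise"`` (that normalization is an assumption, noted again in the comments)::

    import csv, io

    writer = csv.DictWriter(io.StringIO(), fieldnames=["a"], extrasaction="RAISE")
    try:
        # Assumption: with consistent case handling, an unexpected key now
        # raises ValueError even though the option was spelled in upper case.
        writer.writerow({"a": 1, "b": 2})
    except ValueError as exc:
        print(exc)
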
diff --git a/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst b/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst
deleted file mode 100644
index c931409b817122..00000000000000
--- a/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Avoid publishing list of active per-interpreter audit hooks via the
-:mod:`gc` module
diff --git a/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst b/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst
new file mode 100644
index 00000000000000..a8fb98048e4023
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst
@@ -0,0 +1 @@
+Make two tests more forgiving of a host-system libexpat that has backported security fixes applied.
diff --git a/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst b/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst
deleted file mode 100644
index dc86e1d70f1289..00000000000000
--- a/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst
+++ /dev/null
@@ -1 +0,0 @@
-Add functional test for Argument Clinic.
diff --git a/Misc/NEWS.d/next/Tests/2022-11-19-13-34-28.gh-issue-99593.8ZfCkj.rst b/Misc/NEWS.d/next/Tests/2022-11-19-13-34-28.gh-issue-99593.8ZfCkj.rst
deleted file mode 100644
index ec4cda2080323f..00000000000000
--- a/Misc/NEWS.d/next/Tests/2022-11-19-13-34-28.gh-issue-99593.8ZfCkj.rst
+++ /dev/null
@@ -1 +0,0 @@
-Cover the Unicode C API with tests.
diff --git a/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst b/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst
deleted file mode 100644
index 3db1ec12b5202e..00000000000000
--- a/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Optional big memory tests in ``test_sqlite3`` now catch the correct
-:exc:`sqlite.DataError` exception type in case of too large strings and/or
-blobs passed.
diff --git a/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst b/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst
new file mode 100644
index 00000000000000..a5f1bb9f5a5e05
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst
@@ -0,0 +1,3 @@
+The Python test runner (libregrtest) now logs Python build information like
+"debug" vs "release" build, or LTO and PGO optimizations. Patch by Victor
+Stinner.
diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst b/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst
deleted file mode 100644
index 4a308a9306055c..00000000000000
--- a/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Argument Clinic varargs bugfixes
-
-* Fix out-of-bounds error in :c:func:`!_PyArg_UnpackKeywordsWithVararg`.
-* Fix incorrect check which allowed more than one varargs in clinic.py.
-* Fix miscalculation of ``noptargs`` in generated code.
-* Do not generate ``noptargs`` when there is a vararg argument and no optional argument.
-
diff --git a/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst b/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst
deleted file mode 100644
index 2670aeef9a2525..00000000000000
--- a/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix a memory leak in :class:`multiprocessing.shared_memory.SharedMemory` on
-Windows.
diff --git a/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst b/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst
new file mode 100644
index 00000000000000..a5975b2d00c7bf
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst
@@ -0,0 +1,3 @@
+:func:`winreg.SetValueEx` now leaves the target value untouched when a
+conversion error occurs. Previously, ``-1`` was written in such cases.
+
diff --git a/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst b/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst
deleted file mode 100644
index 8e19366c429715..00000000000000
--- a/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix handling in :ref:`launcher` when ``argv[0]`` does not include a file
-extension.
diff --git a/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst b/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst
deleted file mode 100644
index 46cbf998eb2001..00000000000000
--- a/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix initialization of :data:`sys.version` and ``sys._git`` on Windows
diff --git a/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst b/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst
deleted file mode 100644
index 99db0c55a67eed..00000000000000
--- a/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Use faster initialization functions to detect install location for Windows
-Store package
diff --git a/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst b/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst
new file mode 100644
index 00000000000000..a019d7287207d8
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst
@@ -0,0 +1,2 @@
+Ensure that :meth:`asyncio.Protocol.data_received` receives an immutable
+:class:`bytes` object (as documented) instead of :class:`bytearray`.
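
A minimal protocol snippet showing the guarantee this entry documents; it is only a type check, not a complete transport setup::

    import asyncio

    class CheckingProtocol(asyncio.Protocol):
        def data_received(self, data):
            # With this change the event loop hands the callback an
            # immutable bytes object rather than a bytearray.
            assert type(data) is bytes
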
diff --git a/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst b/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst
new file mode 100644
index 00000000000000..e2e6ca3c7796e0
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst
@@ -0,0 +1 @@
+Define ``MS_WIN64`` when building with 64-bit Mingw-w64, fixing a Cython compilation failure.
diff --git a/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst b/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst
deleted file mode 100644
index 18ef0b0e252322..00000000000000
--- a/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix ``Mac/Extras.install.py`` file filter bug.
diff --git a/Misc/NEWS.d/next/macOS/2022-11-25-09-23-20.gh-issue-87235.SifjCD.rst b/Misc/NEWS.d/next/macOS/2022-11-25-09-23-20.gh-issue-87235.SifjCD.rst
deleted file mode 100644
index 3111e4975e87b3..00000000000000
--- a/Misc/NEWS.d/next/macOS/2022-11-25-09-23-20.gh-issue-87235.SifjCD.rst
+++ /dev/null
@@ -1 +0,0 @@
-On macOS ``python3 /dev/fd/9 9`` [... remainder of deleted entry truncated in source ...]
diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c
--- a/Modules/_asynciomodule.c
+++ b/Modules/_asynciomodule.c
@@ [... hunk header truncated in source ...] @@
     [...]asyncio_get_event_loop_policy);
if (policy == NULL) {
return NULL;
@@ -538,7 +531,7 @@ future_init(FutureObj *fut, PyObject *loop)
if (loop == Py_None) {
asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut);
- loop = get_event_loop(state, 1);
+ loop = get_event_loop(state);
if (loop == NULL) {
return -1;
}
@@ -3229,20 +3222,7 @@ _asyncio_get_event_loop_impl(PyObject *module)
/*[clinic end generated code: output=2a2d8b2f824c648b input=9364bf2916c8655d]*/
{
asyncio_state *state = get_asyncio_state(module);
- return get_event_loop(state, 1);
-}
-
-/*[clinic input]
-_asyncio._get_event_loop
- stacklevel: int = 3
-[clinic start generated code]*/
-
-static PyObject *
-_asyncio__get_event_loop_impl(PyObject *module, int stacklevel)
-/*[clinic end generated code: output=9c1d6d3c802e67c9 input=d17aebbd686f711d]*/
-{
- asyncio_state *state = get_asyncio_state(module);
- return get_event_loop(state, stacklevel-1);
+ return get_event_loop(state);
}
/*[clinic input]
@@ -3620,7 +3600,6 @@ PyDoc_STRVAR(module_doc, "Accelerator module for asyncio");
static PyMethodDef asyncio_methods[] = {
_ASYNCIO_GET_EVENT_LOOP_METHODDEF
- _ASYNCIO__GET_EVENT_LOOP_METHODDEF
_ASYNCIO_GET_RUNNING_LOOP_METHODDEF
_ASYNCIO__GET_RUNNING_LOOP_METHODDEF
_ASYNCIO__SET_RUNNING_LOOP_METHODDEF
diff --git a/Modules/_codecsmodule.c b/Modules/_codecsmodule.c
index 8a0df4266e8354..d5035d20600ae2 100644
--- a/Modules/_codecsmodule.c
+++ b/Modules/_codecsmodule.c
@@ -256,14 +256,14 @@ _codecs_escape_encode_impl(PyObject *module, PyObject *data,
_codecs.utf_7_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_7_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=0cd3a944a32a4089 input=22c395d357815d26]*/
+/*[clinic end generated code: output=0cd3a944a32a4089 input=dbf8c8998102dc7d]*/
{
Py_ssize_t consumed = data->len;
PyObject *decoded = PyUnicode_DecodeUTF7Stateful(data->buf, data->len,
@@ -276,14 +276,14 @@ _codecs_utf_7_decode_impl(PyObject *module, Py_buffer *data,
_codecs.utf_8_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_8_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=10f74dec8d9bb8bf input=f611b3867352ba59]*/
+/*[clinic end generated code: output=10f74dec8d9bb8bf input=ca06bc8a9c970e25]*/
{
Py_ssize_t consumed = data->len;
PyObject *decoded = PyUnicode_DecodeUTF8Stateful(data->buf, data->len,
@@ -296,14 +296,14 @@ _codecs_utf_8_decode_impl(PyObject *module, Py_buffer *data,
_codecs.utf_16_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_16_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=783b442abcbcc2d0 input=191d360bd7309180]*/
+/*[clinic end generated code: output=783b442abcbcc2d0 input=5b0f52071ba6cadc]*/
{
int byteorder = 0;
/* This is overwritten unless final is true. */
@@ -318,14 +318,14 @@ _codecs_utf_16_decode_impl(PyObject *module, Py_buffer *data,
_codecs.utf_16_le_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_16_le_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=899b9e6364379dcd input=c6904fdc27fb4724]*/
+/*[clinic end generated code: output=899b9e6364379dcd input=115bd8c7b783d0bf]*/
{
int byteorder = -1;
/* This is overwritten unless final is true. */
@@ -340,14 +340,14 @@ _codecs_utf_16_le_decode_impl(PyObject *module, Py_buffer *data,
_codecs.utf_16_be_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_16_be_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=49f6465ea07669c8 input=e49012400974649b]*/
+/*[clinic end generated code: output=49f6465ea07669c8 input=63131422b01f9cb4]*/
{
int byteorder = 1;
/* This is overwritten unless final is true. */
@@ -370,14 +370,14 @@ _codecs.utf_16_ex_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
byteorder: int = 0
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_16_ex_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int byteorder, int final)
-/*[clinic end generated code: output=0f385f251ecc1988 input=5a9c19f2e6b6cf0e]*/
+/*[clinic end generated code: output=0f385f251ecc1988 input=f368a51cf384bf4c]*/
{
/* This is overwritten unless final is true. */
Py_ssize_t consumed = data->len;
@@ -394,14 +394,14 @@ _codecs_utf_16_ex_decode_impl(PyObject *module, Py_buffer *data,
_codecs.utf_32_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_32_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=2fc961807f7b145f input=fd7193965627eb58]*/
+/*[clinic end generated code: output=2fc961807f7b145f input=fcdf3658c5e9b5f3]*/
{
int byteorder = 0;
/* This is overwritten unless final is true. */
@@ -416,14 +416,14 @@ _codecs_utf_32_decode_impl(PyObject *module, Py_buffer *data,
_codecs.utf_32_le_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_32_le_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=ec8f46b67a94f3e6 input=9078ec70acfe7613]*/
+/*[clinic end generated code: output=ec8f46b67a94f3e6 input=12220556e885f817]*/
{
int byteorder = -1;
/* This is overwritten unless final is true. */
@@ -438,14 +438,14 @@ _codecs_utf_32_le_decode_impl(PyObject *module, Py_buffer *data,
_codecs.utf_32_be_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_32_be_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=ff82bae862c92c4e input=f1ae1bbbb86648ff]*/
+/*[clinic end generated code: output=ff82bae862c92c4e input=2bc669b4781598db]*/
{
int byteorder = 1;
/* This is overwritten unless final is true. */
@@ -468,14 +468,14 @@ _codecs.utf_32_ex_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
byteorder: int = 0
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_utf_32_ex_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int byteorder, int final)
-/*[clinic end generated code: output=6bfb177dceaf4848 input=e46a73bc859d0bd0]*/
+/*[clinic end generated code: output=6bfb177dceaf4848 input=4a2323d0013620df]*/
{
Py_ssize_t consumed = data->len;
PyObject *decoded = PyUnicode_DecodeUTF32Stateful(data->buf, data->len,
@@ -490,14 +490,14 @@ _codecs_utf_32_ex_decode_impl(PyObject *module, Py_buffer *data,
_codecs.unicode_escape_decode
data: Py_buffer(accept={str, buffer})
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = True
+ final: bool = True
/
[clinic start generated code]*/
static PyObject *
_codecs_unicode_escape_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=b284f97b12c635ee input=6154f039a9f7c639]*/
+/*[clinic end generated code: output=b284f97b12c635ee input=15019f081ffe272b]*/
{
Py_ssize_t consumed = data->len;
PyObject *decoded = _PyUnicode_DecodeUnicodeEscapeStateful(data->buf, data->len,
@@ -510,14 +510,14 @@ _codecs_unicode_escape_decode_impl(PyObject *module, Py_buffer *data,
_codecs.raw_unicode_escape_decode
data: Py_buffer(accept={str, buffer})
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = True
+ final: bool = True
/
[clinic start generated code]*/
static PyObject *
_codecs_raw_unicode_escape_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=11dbd96301e2879e input=2d166191beb3235a]*/
+/*[clinic end generated code: output=11dbd96301e2879e input=b93f823aa8c343ad]*/
{
Py_ssize_t consumed = data->len;
PyObject *decoded = _PyUnicode_DecodeRawUnicodeEscapeStateful(data->buf, data->len,
@@ -586,14 +586,14 @@ _codecs_charmap_decode_impl(PyObject *module, Py_buffer *data,
_codecs.mbcs_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_mbcs_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=39b65b8598938c4b input=1c1d50f08fa53789]*/
+/*[clinic end generated code: output=39b65b8598938c4b input=f144ad1ed6d8f5a6]*/
{
Py_ssize_t consumed = data->len;
PyObject *decoded = PyUnicode_DecodeMBCSStateful(data->buf, data->len,
@@ -605,14 +605,14 @@ _codecs_mbcs_decode_impl(PyObject *module, Py_buffer *data,
_codecs.oem_decode
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_oem_decode_impl(PyObject *module, Py_buffer *data,
const char *errors, int final)
-/*[clinic end generated code: output=da1617612f3fcad8 input=81b67cba811022e5]*/
+/*[clinic end generated code: output=da1617612f3fcad8 input=629bf87376d211b4]*/
{
Py_ssize_t consumed = data->len;
PyObject *decoded = PyUnicode_DecodeCodePageStateful(CP_OEMCP,
@@ -625,14 +625,14 @@ _codecs.code_page_decode
codepage: int
data: Py_buffer
errors: str(accept={str, NoneType}) = None
- final: bool(accept={int}) = False
+ final: bool = False
/
[clinic start generated code]*/
static PyObject *
_codecs_code_page_decode_impl(PyObject *module, int codepage,
Py_buffer *data, const char *errors, int final)
-/*[clinic end generated code: output=53008ea967da3fff input=c5f58d036cb63575]*/
+/*[clinic end generated code: output=53008ea967da3fff input=6a32589b0658c277]*/
{
Py_ssize_t consumed = data->len;
PyObject *decoded = PyUnicode_DecodeCodePageStateful(codepage,
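
The ``bool(accept={int})`` to ``bool`` conversions above mean the generated code now reads *final* with ``PyObject_IsTrue()``, so any object is interpreted by its truth value. A small illustrative call; the low-level ``_codecs`` function is used here only to show the converter change directly::

    import _codecs

    # Any truthy object is now accepted for 'final'; previously the argument
    # had to be int-compatible.
    decoded, consumed = _codecs.utf_8_decode(b"abc", "strict", "yes")
    print(decoded, consumed)
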
diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c
index 5146b4004a141a..5691a419a32f8e 100644
--- a/Modules/_cursesmodule.c
+++ b/Modules/_cursesmodule.c
@@ -2371,7 +2371,7 @@ _curses.window.touchline
start: int
count: int
[
- changed: bool(accept={int}) = True
+ changed: bool = True
]
/
@@ -2384,7 +2384,7 @@ as having been changed (changed=True) or unchanged (changed=False).
static PyObject *
_curses_window_touchline_impl(PyCursesWindowObject *self, int start,
int count, int group_right_1, int changed)
-/*[clinic end generated code: output=65d05b3f7438c61d input=918ad1cbdadf93ea]*/
+/*[clinic end generated code: output=65d05b3f7438c61d input=a98aa4f79b6be845]*/
{
if (!group_right_1) {
return PyCursesCheckERR(touchline(self->win, start, count), "touchline");
@@ -2706,7 +2706,7 @@ NoArgTrueFalseFunctionBody(can_change_color)
/*[clinic input]
_curses.cbreak
- flag: bool(accept={int}) = True
+ flag: bool = True
If false, the effect is the same as calling nocbreak().
/
@@ -2721,7 +2721,7 @@ Calling first raw() then cbreak() leaves the terminal in cbreak mode.
static PyObject *
_curses_cbreak_impl(PyObject *module, int flag)
-/*[clinic end generated code: output=9f9dee9664769751 input=150be619eb1f1458]*/
+/*[clinic end generated code: output=9f9dee9664769751 input=c7d0bddda93016c1]*/
NoArgOrFlagNoReturnFunctionBody(cbreak, flag)
/*[clinic input]
@@ -2870,7 +2870,7 @@ NoArgNoReturnFunctionBody(doupdate)
/*[clinic input]
_curses.echo
- flag: bool(accept={int}) = True
+ flag: bool = True
If false, the effect is the same as calling noecho().
/
@@ -2881,7 +2881,7 @@ In echo mode, each character input is echoed to the screen as it is entered.
static PyObject *
_curses_echo_impl(PyObject *module, int flag)
-/*[clinic end generated code: output=03acb2ddfa6c8729 input=2e9e891d637eac5d]*/
+/*[clinic end generated code: output=03acb2ddfa6c8729 input=86cd4d5bb1d569c0]*/
NoArgOrFlagNoReturnFunctionBody(echo, flag)
/*[clinic input]
@@ -3496,14 +3496,14 @@ _curses_set_tabsize_impl(PyObject *module, int size)
/*[clinic input]
_curses.intrflush
- flag: bool(accept={int})
+ flag: bool
/
[clinic start generated code]*/
static PyObject *
_curses_intrflush_impl(PyObject *module, int flag)
-/*[clinic end generated code: output=c1986df35e999a0f input=fcba57bb28dfd795]*/
+/*[clinic end generated code: output=c1986df35e999a0f input=c65fe2ef973fe40a]*/
{
PyCursesInitialised;
@@ -3605,7 +3605,7 @@ NoArgReturnStringFunctionBody(longname)
/*[clinic input]
_curses.meta
- yes: bool(accept={int})
+ yes: bool
/
Enable/disable meta keys.
@@ -3616,7 +3616,7 @@ allow only 7-bit characters.
static PyObject *
_curses_meta_impl(PyObject *module, int yes)
-/*[clinic end generated code: output=22f5abda46a605d8 input=af9892e3a74f35db]*/
+/*[clinic end generated code: output=22f5abda46a605d8 input=cfe7da79f51d0e30]*/
{
PyCursesInitialised;
@@ -3766,7 +3766,7 @@ _curses_newwin_impl(PyObject *module, int nlines, int ncols,
/*[clinic input]
_curses.nl
- flag: bool(accept={int}) = True
+ flag: bool = True
If false, the effect is the same as calling nonl().
/
@@ -3778,7 +3778,7 @@ newline into return and line-feed on output. Newline mode is initially on.
static PyObject *
_curses_nl_impl(PyObject *module, int flag)
-/*[clinic end generated code: output=b39cc0ffc9015003 input=cf36a63f7b86e28a]*/
+/*[clinic end generated code: output=b39cc0ffc9015003 input=18e3e9c6e8cfcf6f]*/
NoArgOrFlagNoReturnFunctionBody(nl, flag)
/*[clinic input]
@@ -3925,7 +3925,7 @@ _curses_putp_impl(PyObject *module, const char *string)
/*[clinic input]
_curses.qiflush
- flag: bool(accept={int}) = True
+ flag: bool = True
If false, the effect is the same as calling noqiflush().
/
@@ -3937,7 +3937,7 @@ will be flushed when the INTR, QUIT and SUSP characters are read.
static PyObject *
_curses_qiflush_impl(PyObject *module, int flag)
-/*[clinic end generated code: output=9167e862f760ea30 input=e9e4a389946a0dbc]*/
+/*[clinic end generated code: output=9167e862f760ea30 input=6ec8b3e2b717ec40]*/
{
PyCursesInitialised;
@@ -4018,7 +4018,7 @@ _curses_update_lines_cols_impl(PyObject *module)
/*[clinic input]
_curses.raw
- flag: bool(accept={int}) = True
+ flag: bool = True
If false, the effect is the same as calling noraw().
/
@@ -4031,7 +4031,7 @@ curses input functions one by one.
static PyObject *
_curses_raw_impl(PyObject *module, int flag)
-/*[clinic end generated code: output=a750e4b342be015b input=e36d8db27832b848]*/
+/*[clinic end generated code: output=a750e4b342be015b input=4b447701389fb4df]*/
NoArgOrFlagNoReturnFunctionBody(raw, flag)
/*[clinic input]
@@ -4503,7 +4503,7 @@ _curses_unget_wch(PyObject *module, PyObject *ch)
/*[clinic input]
_curses.use_env
- flag: bool(accept={int})
+ flag: bool
/
Use environment variables LINES and COLUMNS.
@@ -4520,7 +4520,7 @@ not set).
static PyObject *
_curses_use_env_impl(PyObject *module, int flag)
-/*[clinic end generated code: output=b2c445e435c0b164 input=1778eb1e9151ea37]*/
+/*[clinic end generated code: output=b2c445e435c0b164 input=06ac30948f2d78e4]*/
{
use_env(flag);
Py_RETURN_NONE;
diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c
index 121d9617e1883b..af5950cf66c178 100644
--- a/Modules/_io/_iomodule.c
+++ b/Modules/_io/_iomodule.c
@@ -59,7 +59,7 @@ PyDoc_STRVAR(module_doc,
" I/O classes. open() uses the file's blksize (as obtained by os.stat) if\n"
" possible.\n"
);
-
+
/*
* The main open() function
@@ -74,7 +74,7 @@ _io.open
encoding: str(accept={str, NoneType}) = None
errors: str(accept={str, NoneType}) = None
newline: str(accept={str, NoneType}) = None
- closefd: bool(accept={int}) = True
+ closefd: bool = True
opener: object = None
Open file and return a stream. Raise OSError upon failure.
@@ -196,7 +196,7 @@ static PyObject *
_io_open_impl(PyObject *module, PyObject *file, const char *mode,
int buffering, const char *encoding, const char *errors,
const char *newline, int closefd, PyObject *opener)
-/*[clinic end generated code: output=aefafc4ce2b46dc0 input=5bb37f174cb2fb11]*/
+/*[clinic end generated code: output=aefafc4ce2b46dc0 input=cd034e7cdfbf4e78]*/
{
unsigned i;
@@ -204,8 +204,7 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode,
int text = 0, binary = 0;
char rawmode[6], *m;
- int line_buffering, is_number;
- long isatty = 0;
+ int line_buffering, is_number, isatty = 0;
PyObject *raw, *modeobj = NULL, *buffer, *wrapper, *result = NULL, *path_or_fd = NULL;
@@ -345,9 +344,9 @@ _io_open_impl(PyObject *module, PyObject *file, const char *mode,
PyObject *res = PyObject_CallMethodNoArgs(raw, &_Py_ID(isatty));
if (res == NULL)
goto error;
- isatty = PyLong_AsLong(res);
+ isatty = PyObject_IsTrue(res);
Py_DECREF(res);
- if (isatty == -1 && PyErr_Occurred())
+ if (isatty < 0)
goto error;
}
@@ -509,7 +508,7 @@ _io_open_code_impl(PyObject *module, PyObject *path)
{
return PyFile_OpenCodeObject(path);
}
-
+
/*
* Private helpers for the io module.
*/
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
index 6df55b5b8303c2..ba8969f0bcd100 100644
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -746,26 +746,26 @@ _buffered_init(buffered *self)
int
_PyIO_trap_eintr(void)
{
- static PyObject *eintr_int = NULL;
PyObject *typ, *val, *tb;
PyOSErrorObject *env_err;
-
- if (eintr_int == NULL) {
- eintr_int = PyLong_FromLong(EINTR);
- assert(eintr_int != NULL);
- }
if (!PyErr_ExceptionMatches(PyExc_OSError))
return 0;
PyErr_Fetch(&typ, &val, &tb);
PyErr_NormalizeException(&typ, &val, &tb);
env_err = (PyOSErrorObject *) val;
assert(env_err != NULL);
- if (env_err->myerrno != NULL &&
- PyObject_RichCompareBool(env_err->myerrno, eintr_int, Py_EQ) > 0) {
- Py_DECREF(typ);
- Py_DECREF(val);
- Py_XDECREF(tb);
- return 1;
+ if (env_err->myerrno != NULL) {
+ assert(EINTR > 0 && EINTR < INT_MAX);
+ assert(PyLong_CheckExact(env_err->myerrno));
+ int overflow;
+ int myerrno = PyLong_AsLongAndOverflow(env_err->myerrno, &overflow);
+ PyErr_Clear();
+ if (myerrno == EINTR) {
+ Py_DECREF(typ);
+ Py_DECREF(val);
+ Py_XDECREF(tb);
+ return 1;
+ }
}
/* This silences any error set by PyObject_RichCompareBool() */
PyErr_Restore(typ, val, tb);
diff --git a/Modules/_io/clinic/_iomodule.c.h b/Modules/_io/clinic/_iomodule.c.h
index b38738486f6856..4d76e333b0f293 100644
--- a/Modules/_io/clinic/_iomodule.c.h
+++ b/Modules/_io/clinic/_iomodule.c.h
@@ -280,8 +280,8 @@ _io_open(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
}
}
if (args[6]) {
- closefd = _PyLong_AsInt(args[6]);
- if (closefd == -1 && PyErr_Occurred()) {
+ closefd = PyObject_IsTrue(args[6]);
+ if (closefd < 0) {
goto exit;
}
if (!--noptargs) {
@@ -407,4 +407,4 @@ _io_open_code(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec
exit:
return return_value;
}
-/*[clinic end generated code: output=1f8001287a423470 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=f387eba3f4c0254a input=a9049054013a1b77]*/
diff --git a/Modules/_io/clinic/fileio.c.h b/Modules/_io/clinic/fileio.c.h
index a925b94fe07531..b6e9bd5b65a029 100644
--- a/Modules/_io/clinic/fileio.c.h
+++ b/Modules/_io/clinic/fileio.c.h
@@ -116,8 +116,8 @@ _io_FileIO___init__(PyObject *self, PyObject *args, PyObject *kwargs)
}
}
if (fastargs[2]) {
- closefd = _PyLong_AsInt(fastargs[2]);
- if (closefd == -1 && PyErr_Occurred()) {
+ closefd = PyObject_IsTrue(fastargs[2]);
+ if (closefd < 0) {
goto exit;
}
if (!--noptargs) {
@@ -466,4 +466,4 @@ _io_FileIO_isatty(fileio *self, PyObject *Py_UNUSED(ignored))
#ifndef _IO_FILEIO_TRUNCATE_METHODDEF
#define _IO_FILEIO_TRUNCATE_METHODDEF
#endif /* !defined(_IO_FILEIO_TRUNCATE_METHODDEF) */
-/*[clinic end generated code: output=ff479a26cab0d479 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=27f883807a6c29ae input=a9049054013a1b77]*/
diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h
index 038f0a5c209d49..db968e884cc805 100644
--- a/Modules/_io/clinic/textio.c.h
+++ b/Modules/_io/clinic/textio.c.h
@@ -68,8 +68,8 @@ _io_IncrementalNewlineDecoder___init__(PyObject *self, PyObject *args, PyObject
goto exit;
}
decoder = fastargs[0];
- translate = _PyLong_AsInt(fastargs[1]);
- if (translate == -1 && PyErr_Occurred()) {
+ translate = PyObject_IsTrue(fastargs[1]);
+ if (translate < 0) {
goto exit;
}
if (!noptargs) {
@@ -137,8 +137,8 @@ _io_IncrementalNewlineDecoder_decode(nldecoder_object *self, PyObject *const *ar
if (!noptargs) {
goto skip_optional_pos;
}
- final = _PyLong_AsInt(args[1]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[1]);
+ if (final < 0) {
goto exit;
}
skip_optional_pos:
@@ -331,16 +331,16 @@ _io_TextIOWrapper___init__(PyObject *self, PyObject *args, PyObject *kwargs)
}
}
if (fastargs[4]) {
- line_buffering = _PyLong_AsInt(fastargs[4]);
- if (line_buffering == -1 && PyErr_Occurred()) {
+ line_buffering = PyObject_IsTrue(fastargs[4]);
+ if (line_buffering < 0) {
goto exit;
}
if (!--noptargs) {
goto skip_optional_pos;
}
}
- write_through = _PyLong_AsInt(fastargs[5]);
- if (write_through == -1 && PyErr_Occurred()) {
+ write_through = PyObject_IsTrue(fastargs[5]);
+ if (write_through < 0) {
goto exit;
}
skip_optional_pos:
@@ -769,4 +769,4 @@ _io_TextIOWrapper_close(textio *self, PyObject *Py_UNUSED(ignored))
{
return _io_TextIOWrapper_close_impl(self);
}
-/*[clinic end generated code: output=aecd376eca3cb148 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=73f84b13c343b34b input=a9049054013a1b77]*/
diff --git a/Modules/_io/clinic/winconsoleio.c.h b/Modules/_io/clinic/winconsoleio.c.h
index 65820a8f2ea0b3..df834dbde40f5b 100644
--- a/Modules/_io/clinic/winconsoleio.c.h
+++ b/Modules/_io/clinic/winconsoleio.c.h
@@ -115,8 +115,8 @@ _io__WindowsConsoleIO___init__(PyObject *self, PyObject *args, PyObject *kwargs)
}
}
if (fastargs[2]) {
- closefd = _PyLong_AsInt(fastargs[2]);
- if (closefd == -1 && PyErr_Occurred()) {
+ closefd = PyObject_IsTrue(fastargs[2]);
+ if (closefd < 0) {
goto exit;
}
if (!--noptargs) {
@@ -407,4 +407,4 @@ _io__WindowsConsoleIO_isatty(winconsoleio *self, PyObject *Py_UNUSED(ignored))
#ifndef _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF
#define _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF
#endif /* !defined(_IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF) */
-/*[clinic end generated code: output=08ae244e9a44da55 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=4920e9068e0cf08a input=a9049054013a1b77]*/
diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c
index 659297ef1b1d30..d1a183cedac53a 100644
--- a/Modules/_io/fileio.c
+++ b/Modules/_io/fileio.c
@@ -198,7 +198,7 @@ extern int _Py_open_cloexec_works;
_io.FileIO.__init__
file as nameobj: object
mode: str = "r"
- closefd: bool(accept={int}) = True
+ closefd: bool = True
opener: object = None
Open a file.
@@ -219,7 +219,7 @@ results in functionality similar to passing None).
static int
_io_FileIO___init___impl(fileio *self, PyObject *nameobj, const char *mode,
int closefd, PyObject *opener)
-/*[clinic end generated code: output=23413f68e6484bbd input=1596c9157a042a39]*/
+/*[clinic end generated code: output=23413f68e6484bbd input=588aac967e0ba74b]*/
{
#ifdef MS_WINDOWS
Py_UNICODE *widename = NULL;
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c
index 3091f6efafccd4..32ab8a44c62151 100644
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -212,7 +212,7 @@ typedef struct {
/*[clinic input]
_io.IncrementalNewlineDecoder.__init__
decoder: object
- translate: int
+ translate: bool
errors: object(c_default="NULL") = "strict"
Codec used when reading a file in universal newlines mode.
@@ -229,7 +229,7 @@ static int
_io_IncrementalNewlineDecoder___init___impl(nldecoder_object *self,
PyObject *decoder, int translate,
PyObject *errors)
-/*[clinic end generated code: output=fbd04d443e764ec2 input=89db6b19c6b126bf]*/
+/*[clinic end generated code: output=fbd04d443e764ec2 input=ed547aa257616b0e]*/
{
if (errors == NULL) {
@@ -484,13 +484,13 @@ _PyIncrementalNewlineDecoder_decode(PyObject *myself,
/*[clinic input]
_io.IncrementalNewlineDecoder.decode
input: object
- final: bool(accept={int}) = False
+ final: bool = False
[clinic start generated code]*/
static PyObject *
_io_IncrementalNewlineDecoder_decode_impl(nldecoder_object *self,
PyObject *input, int final)
-/*[clinic end generated code: output=0d486755bb37a66e input=a4ea97f26372d866]*/
+/*[clinic end generated code: output=0d486755bb37a66e input=90e223c70322c5cd]*/
{
return _PyIncrementalNewlineDecoder_decode((PyObject *) self, input, final);
}
@@ -1023,8 +1023,8 @@ _io.TextIOWrapper.__init__
encoding: str(accept={str, NoneType}) = None
errors: object = None
newline: str(accept={str, NoneType}) = None
- line_buffering: bool(accept={int}) = False
- write_through: bool(accept={int}) = False
+ line_buffering: bool = False
+ write_through: bool = False
Character and line based layer over a BufferedIOBase object, buffer.
@@ -1061,7 +1061,7 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer,
const char *encoding, PyObject *errors,
const char *newline, int line_buffering,
int write_through)
-/*[clinic end generated code: output=72267c0c01032ed2 input=72590963698f289b]*/
+/*[clinic end generated code: output=72267c0c01032ed2 input=e6cfaaaf6059d4f5]*/
{
PyObject *raw, *codec_info = NULL;
PyObject *res;
diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c
index 5c1a6dd86fc54f..d5de64b4ac3dfd 100644
--- a/Modules/_io/winconsoleio.c
+++ b/Modules/_io/winconsoleio.c
@@ -235,7 +235,7 @@ winconsoleio_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
_io._WindowsConsoleIO.__init__
file as nameobj: object
mode: str = "r"
- closefd: bool(accept={int}) = True
+ closefd: bool = True
opener: object = None
Open a console buffer by file descriptor.
@@ -249,7 +249,7 @@ static int
_io__WindowsConsoleIO___init___impl(winconsoleio *self, PyObject *nameobj,
const char *mode, int closefd,
PyObject *opener)
-/*[clinic end generated code: output=3fd9cbcdd8d95429 input=06ae4b863c63244b]*/
+/*[clinic end generated code: output=3fd9cbcdd8d95429 input=7a3eed6bbe998fd9]*/
{
const char *s;
wchar_t *name = NULL;
diff --git a/Modules/_json.c b/Modules/_json.c
index 81431aa1041c55..429b4ee0fa8d8d 100644
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -556,7 +556,7 @@ py_scanstring(PyObject* Py_UNUSED(self), PyObject *args)
Py_ssize_t end;
Py_ssize_t next_end = -1;
int strict = 1;
- if (!PyArg_ParseTuple(args, "On|i:scanstring", &pystr, &end, &strict)) {
+ if (!PyArg_ParseTuple(args, "On|p:scanstring", &pystr, &end, &strict)) {
return NULL;
}
if (PyUnicode_Check(pystr)) {
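
The format-string switch from ``i`` to ``p`` means *strict* is now read with the bool converter and interpreted by its truth value. A brief sketch through :mod:`json.decoder` (arguments are positional-only here)::

    from json import decoder

    # strict may now be any object; its truth value is what matters.
    text, end = decoder.scanstring('"hi"', 1, True)
    print(text, end)
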
diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c
index cb80c8d339325b..37170bbea56ad3 100644
--- a/Modules/_lsprof.c
+++ b/Modules/_lsprof.c
@@ -667,7 +667,7 @@ profiler_enable(ProfilerObject *self, PyObject *args, PyObject *kwds)
int subcalls = -1;
int builtins = -1;
static char *kwlist[] = {"subcalls", "builtins", 0};
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "|ii:enable",
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|pp:enable",
kwlist, &subcalls, &builtins))
return NULL;
if (setSubcalls(self, subcalls) < 0 || setBuiltins(self, builtins) < 0) {
@@ -770,7 +770,7 @@ profiler_init(ProfilerObject *pObj, PyObject *args, PyObject *kw)
static char *kwlist[] = {"timer", "timeunit",
"subcalls", "builtins", 0};
- if (!PyArg_ParseTupleAndKeywords(args, kw, "|Odii:Profiler", kwlist,
+ if (!PyArg_ParseTupleAndKeywords(args, kw, "|Odpp:Profiler", kwlist,
&timer, &timeunit,
&subcalls, &builtins))
return -1;
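
These converter changes surface through :class:`cProfile.Profile`, whose *subcalls* and *builtins* arguments are forwarded to ``_lsprof.Profiler``. A short sketch::

    import cProfile

    # subcalls/builtins are now parsed with the 'p' (bool) converter, so any
    # truthy or falsy object is accepted.
    prof = cProfile.Profile(subcalls=True, builtins=False)
    prof.enable(subcalls=True, builtins=False)
    prof.disable()
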
diff --git a/Modules/_multiprocessing/clinic/semaphore.c.h b/Modules/_multiprocessing/clinic/semaphore.c.h
index dce0366c266f10..35347169bc1591 100644
--- a/Modules/_multiprocessing/clinic/semaphore.c.h
+++ b/Modules/_multiprocessing/clinic/semaphore.c.h
@@ -65,8 +65,8 @@ _multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_
goto skip_optional_pos;
}
if (args[0]) {
- blocking = _PyLong_AsInt(args[0]);
- if (blocking == -1 && PyErr_Occurred()) {
+ blocking = PyObject_IsTrue(args[0]);
+ if (blocking < 0) {
goto exit;
}
if (!--noptargs) {
@@ -162,8 +162,8 @@ _multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_
goto skip_optional_pos;
}
if (args[0]) {
- blocking = _PyLong_AsInt(args[0]);
- if (blocking == -1 && PyErr_Occurred()) {
+ blocking = PyObject_IsTrue(args[0]);
+ if (blocking < 0) {
goto exit;
}
if (!--noptargs) {
@@ -275,8 +275,8 @@ _multiprocessing_SemLock(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyErr_SetString(PyExc_ValueError, "embedded null character");
goto exit;
}
- unlink = _PyLong_AsInt(fastargs[4]);
- if (unlink == -1 && PyErr_Occurred()) {
+ unlink = PyObject_IsTrue(fastargs[4]);
+ if (unlink < 0) {
goto exit;
}
return_value = _multiprocessing_SemLock_impl(type, kind, value, maxvalue, name, unlink);
@@ -542,4 +542,4 @@ _multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py
#ifndef _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF
#define _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF
#endif /* !defined(_MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF) */
-/*[clinic end generated code: output=720d7d0066dc0954 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=dae57a702cc01512 input=a9049054013a1b77]*/
diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c
index 58fb0eb96aeeed..897b8db7110a41 100644
--- a/Modules/_multiprocessing/semaphore.c
+++ b/Modules/_multiprocessing/semaphore.c
@@ -79,7 +79,7 @@ _GetSemaphoreValue(HANDLE handle, long *value)
/*[clinic input]
_multiprocessing.SemLock.acquire
- block as blocking: bool(accept={int}) = True
+ block as blocking: bool = True
timeout as timeout_obj: object = None
Acquire the semaphore/lock.
@@ -88,7 +88,7 @@ Acquire the semaphore/lock.
static PyObject *
_multiprocessing_SemLock_acquire_impl(SemLockObject *self, int blocking,
PyObject *timeout_obj)
-/*[clinic end generated code: output=f9998f0b6b0b0872 input=86f05662cf753eb4]*/
+/*[clinic end generated code: output=f9998f0b6b0b0872 input=e5b45f5cbb775166]*/
{
double timeout;
DWORD res, full_msecs, nhandles;
@@ -295,7 +295,7 @@ sem_timedwait_save(sem_t *sem, struct timespec *deadline, PyThreadState *_save)
/*[clinic input]
_multiprocessing.SemLock.acquire
- block as blocking: bool(accept={int}) = True
+ block as blocking: bool = True
timeout as timeout_obj: object = None
Acquire the semaphore/lock.
@@ -304,7 +304,7 @@ Acquire the semaphore/lock.
static PyObject *
_multiprocessing_SemLock_acquire_impl(SemLockObject *self, int blocking,
PyObject *timeout_obj)
-/*[clinic end generated code: output=f9998f0b6b0b0872 input=86f05662cf753eb4]*/
+/*[clinic end generated code: output=f9998f0b6b0b0872 input=e5b45f5cbb775166]*/
{
int res, err = 0;
struct timespec deadline = {0};
@@ -474,14 +474,14 @@ _multiprocessing.SemLock.__new__
value: int
maxvalue: int
name: str
- unlink: bool(accept={int})
+ unlink: bool
[clinic start generated code]*/
static PyObject *
_multiprocessing_SemLock_impl(PyTypeObject *type, int kind, int value,
int maxvalue, const char *name, int unlink)
-/*[clinic end generated code: output=30727e38f5f7577a input=b378c3ee27d3a0fa]*/
+/*[clinic end generated code: output=30727e38f5f7577a input=fdaeb69814471c5b]*/
{
SEM_HANDLE handle = SEM_FAILED;
PyObject *result;
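
At the Python level this converter change is visible through the synchronization primitives built on ``_multiprocessing.SemLock``, for example :class:`multiprocessing.Lock`. A short sketch::

    import multiprocessing as mp

    lock = mp.Lock()
    # 'block' is now a true bool parameter; its truth value is what matters.
    lock.acquire(block=True)
    lock.release()
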
diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c
index 717b1cf2202105..b7563ee8250a94 100644
--- a/Modules/_posixsubprocess.c
+++ b/Modules/_posixsubprocess.c
@@ -814,7 +814,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
int allow_vfork;
if (!PyArg_ParseTuple(
- args, "OOpO!OOiiiiiiiiii" _Py_PARSE_PID "OOOiOp:fork_exec",
+ args, "OOpO!OOiiiiiiiipp" _Py_PARSE_PID "OOOiOp:fork_exec",
&process_args, &executable_list,
&close_fds, &PyTuple_Type, &py_fds_to_keep,
&cwd_obj, &env_list,
diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h
index 1f9841c368b313..4c3fd1bd27411b 100644
--- a/Modules/_sqlite/clinic/connection.c.h
+++ b/Modules/_sqlite/clinic/connection.c.h
@@ -100,8 +100,8 @@ pysqlite_connection_init(PyObject *self, PyObject *args, PyObject *kwargs)
}
}
if (fastargs[4]) {
- check_same_thread = _PyLong_AsInt(fastargs[4]);
- if (check_same_thread == -1 && PyErr_Occurred()) {
+ check_same_thread = PyObject_IsTrue(fastargs[4]);
+ if (check_same_thread < 0) {
goto exit;
}
if (!--noptargs) {
@@ -305,8 +305,8 @@ blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyO
goto skip_optional_kwonly;
}
if (args[3]) {
- readonly = _PyLong_AsInt(args[3]);
- if (readonly == -1 && PyErr_Occurred()) {
+ readonly = PyObject_IsTrue(args[3]);
+ if (readonly < 0) {
goto exit;
}
if (!--noptargs) {
@@ -831,8 +831,8 @@ pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *a
PyObject *return_value = NULL;
int onoff;
- onoff = _PyLong_AsInt(arg);
- if (onoff == -1 && PyErr_Occurred()) {
+ onoff = PyObject_IsTrue(arg);
+ if (onoff < 0) {
goto exit;
}
return_value = pysqlite_connection_enable_load_extension_impl(self, onoff);
@@ -1532,4 +1532,4 @@ getlimit(pysqlite_Connection *self, PyObject *arg)
#ifndef DESERIALIZE_METHODDEF
#define DESERIALIZE_METHODDEF
#endif /* !defined(DESERIALIZE_METHODDEF) */
-/*[clinic end generated code: output=20e929a7a7d62a01 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=f10306e10427488b input=a9049054013a1b77]*/
diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
index 2854c1b5c31b2f..4c07d5e0b61f8c 100644
--- a/Modules/_sqlite/connection.c
+++ b/Modules/_sqlite/connection.c
@@ -197,7 +197,7 @@ _sqlite3.Connection.__init__ as pysqlite_connection_init
timeout: double = 5.0
detect_types: int = 0
isolation_level: IsolationLevel = ""
- check_same_thread: bool(accept={int}) = True
+ check_same_thread: bool = True
factory: object(c_default='(PyObject*)clinic_state()->ConnectionType') = ConnectionType
cached_statements as cache_size: int = 128
uri: bool = False
@@ -212,7 +212,7 @@ pysqlite_connection_init_impl(pysqlite_Connection *self, PyObject *database,
int check_same_thread, PyObject *factory,
int cache_size, int uri,
enum autocommit_mode autocommit)
-/*[clinic end generated code: output=cba057313ea7712f input=b21abce28ebcd304]*/
+/*[clinic end generated code: output=cba057313ea7712f input=9b0ab6c12f674fa3]*/
{
if (PySys_Audit("sqlite3.connect", "O", database) < 0) {
return -1;
@@ -485,7 +485,7 @@ _sqlite3.Connection.blobopen as blobopen
Row index.
/
*
- readonly: bool(accept={int}) = False
+ readonly: bool = False
Open the BLOB without write permissions.
name: str = "main"
Database name.
@@ -496,7 +496,7 @@ Open and return a BLOB object.
static PyObject *
blobopen_impl(pysqlite_Connection *self, const char *table, const char *col,
int row, int readonly, const char *name)
-/*[clinic end generated code: output=0c8e2e58516d0b5c input=1e7052516acfc94d]*/
+/*[clinic end generated code: output=0c8e2e58516d0b5c input=fa73c83aa7a7ddee]*/
{
if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) {
return NULL;
@@ -1564,7 +1564,7 @@ pysqlite_connection_set_trace_callback_impl(pysqlite_Connection *self,
/*[clinic input]
_sqlite3.Connection.enable_load_extension as pysqlite_connection_enable_load_extension
- enable as onoff: bool(accept={int})
+ enable as onoff: bool
/
Enable dynamic loading of SQLite extension modules.
@@ -1573,7 +1573,7 @@ Enable dynamic loading of SQLite extension modules.
static PyObject *
pysqlite_connection_enable_load_extension_impl(pysqlite_Connection *self,
int onoff)
-/*[clinic end generated code: output=9cac37190d388baf input=5f00e93f7a9d3540]*/
+/*[clinic end generated code: output=9cac37190d388baf input=2a1e87931486380f]*/
{
int rc;
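
A user-visible sketch of the converter changes above, via :func:`sqlite3.connect`::

    import sqlite3

    # check_same_thread now goes through the bool converter, so any object's
    # truth value is honoured.
    con = sqlite3.connect(":memory:", check_same_thread=False)
    con.close()
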
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index 2885774295b065..591eb91dd0f340 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -4168,7 +4168,7 @@ _ssl__SSLContext_load_dh_params(PySSLContext *self, PyObject *filepath)
/*[clinic input]
_ssl._SSLContext._wrap_socket
sock: object(subclass_of="get_state_ctx(self)->Sock_Type")
- server_side: int
+ server_side: bool
server_hostname as hostname_obj: object = None
*
owner: object = None
@@ -4180,7 +4180,7 @@ static PyObject *
_ssl__SSLContext__wrap_socket_impl(PySSLContext *self, PyObject *sock,
int server_side, PyObject *hostname_obj,
PyObject *owner, PyObject *session)
-/*[clinic end generated code: output=f103f238633940b4 input=f5916eadbc6eae81]*/
+/*[clinic end generated code: output=f103f238633940b4 input=700ca8fedff53994]*/
{
char *hostname = NULL;
PyObject *res;
@@ -4205,7 +4205,7 @@ _ssl__SSLContext__wrap_socket_impl(PySSLContext *self, PyObject *sock,
_ssl._SSLContext._wrap_bio
incoming: object(subclass_of="get_state_ctx(self)->PySSLMemoryBIO_Type", type="PySSLMemoryBIO *")
outgoing: object(subclass_of="get_state_ctx(self)->PySSLMemoryBIO_Type", type="PySSLMemoryBIO *")
- server_side: int
+ server_side: bool
server_hostname as hostname_obj: object = None
*
owner: object = None
@@ -4218,7 +4218,7 @@ _ssl__SSLContext__wrap_bio_impl(PySSLContext *self, PySSLMemoryBIO *incoming,
PySSLMemoryBIO *outgoing, int server_side,
PyObject *hostname_obj, PyObject *owner,
PyObject *session)
-/*[clinic end generated code: output=5c5d6d9b41f99332 input=331edeec9c738382]*/
+/*[clinic end generated code: output=5c5d6d9b41f99332 input=a9205d097fd45a82]*/
{
char *hostname = NULL;
PyObject *res;
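
*server_side* is now declared as ``bool`` in the clinic input; at the Python level it is reached through ``SSLContext.wrap_socket()`` and ``wrap_bio()``. A handshake-free sketch (the hostname is a placeholder)::

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    incoming, outgoing = ssl.MemoryBIO(), ssl.MemoryBIO()
    # server_side is interpreted by its truth value; no I/O happens until a
    # handshake is attempted.
    sslobj = ctx.wrap_bio(incoming, outgoing, server_side=False,
                          server_hostname="example.com")
    print(type(sslobj).__name__)
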
diff --git a/Modules/_struct.c b/Modules/_struct.c
index 0cf34fbf9a3afa..3db7b991acd0a1 100644
--- a/Modules/_struct.c
+++ b/Modules/_struct.c
@@ -167,9 +167,6 @@ get_long(_structmodulestate *state, PyObject *v, long *p)
x = PyLong_AsLong(v);
Py_DECREF(v);
if (x == (long)-1 && PyErr_Occurred()) {
- if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(state->StructError,
- "argument out of range");
return -1;
}
*p = x;
@@ -191,9 +188,6 @@ get_ulong(_structmodulestate *state, PyObject *v, unsigned long *p)
x = PyLong_AsUnsignedLong(v);
Py_DECREF(v);
if (x == (unsigned long)-1 && PyErr_Occurred()) {
- if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(state->StructError,
- "argument out of range");
return -1;
}
*p = x;
@@ -214,9 +208,6 @@ get_longlong(_structmodulestate *state, PyObject *v, long long *p)
x = PyLong_AsLongLong(v);
Py_DECREF(v);
if (x == (long long)-1 && PyErr_Occurred()) {
- if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(state->StructError,
- "argument out of range");
return -1;
}
*p = x;
@@ -237,9 +228,6 @@ get_ulonglong(_structmodulestate *state, PyObject *v, unsigned long long *p)
x = PyLong_AsUnsignedLongLong(v);
Py_DECREF(v);
if (x == (unsigned long long)-1 && PyErr_Occurred()) {
- if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(state->StructError,
- "argument out of range");
return -1;
}
*p = x;
@@ -260,9 +248,6 @@ get_ssize_t(_structmodulestate *state, PyObject *v, Py_ssize_t *p)
x = PyLong_AsSsize_t(v);
Py_DECREF(v);
if (x == (Py_ssize_t)-1 && PyErr_Occurred()) {
- if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(state->StructError,
- "argument out of range");
return -1;
}
*p = x;
@@ -283,9 +268,6 @@ get_size_t(_structmodulestate *state, PyObject *v, size_t *p)
x = PyLong_AsSize_t(v);
Py_DECREF(v);
if (x == (size_t)-1 && PyErr_Occurred()) {
- if (PyErr_ExceptionMatches(PyExc_OverflowError))
- PyErr_SetString(state->StructError,
- "argument out of range");
return -1;
}
*p = x;
@@ -293,7 +275,7 @@ get_size_t(_structmodulestate *state, PyObject *v, size_t *p)
}
-#define RANGE_ERROR(state, x, f, flag, mask) return _range_error(state, f, flag)
+#define RANGE_ERROR(state, f, flag) return _range_error(state, f, flag)
/* Floating point helpers */
@@ -545,12 +527,14 @@ static int
np_byte(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
long x;
- if (get_long(state, v, &x) < 0)
+ if (get_long(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 0);
+ }
return -1;
+ }
if (x < -128 || x > 127) {
- PyErr_SetString(state->StructError,
- "byte format requires -128 <= number <= 127");
- return -1;
+ RANGE_ERROR(state, f, 0);
}
*p = (char)x;
return 0;
@@ -560,12 +544,14 @@ static int
np_ubyte(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
long x;
- if (get_long(state, v, &x) < 0)
+ if (get_long(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 1);
+ }
return -1;
+ }
if (x < 0 || x > 255) {
- PyErr_SetString(state->StructError,
- "ubyte format requires 0 <= number <= 255");
- return -1;
+ RANGE_ERROR(state, f, 1);
}
*(unsigned char *)p = (unsigned char)x;
return 0;
@@ -588,13 +574,14 @@ np_short(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
long x;
short y;
- if (get_long(state, v, &x) < 0)
+ if (get_long(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 0);
+ }
return -1;
+ }
if (x < SHRT_MIN || x > SHRT_MAX) {
- PyErr_Format(state->StructError,
- "short format requires %d <= number <= %d",
- (int)SHRT_MIN, (int)SHRT_MAX);
- return -1;
+ RANGE_ERROR(state, f, 0);
}
y = (short)x;
memcpy(p, (char *)&y, sizeof y);
@@ -606,13 +593,14 @@ np_ushort(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
long x;
unsigned short y;
- if (get_long(state, v, &x) < 0)
+ if (get_long(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 1);
+ }
return -1;
+ }
if (x < 0 || x > USHRT_MAX) {
- PyErr_Format(state->StructError,
- "ushort format requires 0 <= number <= %u",
- (unsigned int)USHRT_MAX);
- return -1;
+ RANGE_ERROR(state, f, 1);
}
y = (unsigned short)x;
memcpy(p, (char *)&y, sizeof y);
@@ -624,11 +612,15 @@ np_int(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
long x;
int y;
- if (get_long(state, v, &x) < 0)
+ if (get_long(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 0);
+ }
return -1;
+ }
#if (SIZEOF_LONG > SIZEOF_INT)
if ((x < ((long)INT_MIN)) || (x > ((long)INT_MAX)))
- RANGE_ERROR(state, x, f, 0, -1);
+ RANGE_ERROR(state, f, 0);
#endif
y = (int)x;
memcpy(p, (char *)&y, sizeof y);
@@ -640,12 +632,16 @@ np_uint(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
unsigned long x;
unsigned int y;
- if (get_ulong(state, v, &x) < 0)
+ if (get_ulong(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 1);
+ }
return -1;
+ }
y = (unsigned int)x;
#if (SIZEOF_LONG > SIZEOF_INT)
if (x > ((unsigned long)UINT_MAX))
- RANGE_ERROR(state, y, f, 1, -1);
+ RANGE_ERROR(state, f, 1);
#endif
memcpy(p, (char *)&y, sizeof y);
return 0;
@@ -655,8 +651,12 @@ static int
np_long(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
long x;
- if (get_long(state, v, &x) < 0)
+ if (get_long(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 0);
+ }
return -1;
+ }
memcpy(p, (char *)&x, sizeof x);
return 0;
}
@@ -665,8 +665,12 @@ static int
np_ulong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
unsigned long x;
- if (get_ulong(state, v, &x) < 0)
+ if (get_ulong(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 1);
+ }
return -1;
+ }
memcpy(p, (char *)&x, sizeof x);
return 0;
}
@@ -675,8 +679,12 @@ static int
np_ssize_t(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
Py_ssize_t x;
- if (get_ssize_t(state, v, &x) < 0)
+ if (get_ssize_t(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 0);
+ }
return -1;
+ }
memcpy(p, (char *)&x, sizeof x);
return 0;
}
@@ -685,8 +693,12 @@ static int
np_size_t(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
size_t x;
- if (get_size_t(state, v, &x) < 0)
+ if (get_size_t(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 1);
+ }
return -1;
+ }
memcpy(p, (char *)&x, sizeof x);
return 0;
}
@@ -695,8 +707,16 @@ static int
np_longlong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
long long x;
- if (get_longlong(state, v, &x) < 0)
+ if (get_longlong(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ PyErr_Format(state->StructError,
+ "'%c' format requires %lld <= number <= %lld",
+ f->format,
+ LLONG_MIN,
+ LLONG_MAX);
+ }
return -1;
+ }
memcpy(p, (char *)&x, sizeof x);
return 0;
}
@@ -705,8 +725,15 @@ static int
np_ulonglong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
{
unsigned long long x;
- if (get_ulonglong(state, v, &x) < 0)
+ if (get_ulonglong(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ PyErr_Format(state->StructError,
+ "'%c' format requires 0 <= number <= %llu",
+ f->format,
+ ULLONG_MAX);
+ }
return -1;
+ }
memcpy(p, (char *)&x, sizeof x);
return 0;
}
@@ -911,15 +938,19 @@ bp_int(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
long x;
Py_ssize_t i;
unsigned char *q = (unsigned char *)p;
- if (get_long(state, v, &x) < 0)
+ if (get_long(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 0);
+ }
return -1;
+ }
i = f->size;
if (i != SIZEOF_LONG) {
if ((i == 2) && (x < -32768 || x > 32767))
- RANGE_ERROR(state, x, f, 0, 0xffffL);
+ RANGE_ERROR(state, f, 0);
#if (SIZEOF_LONG != 4)
else if ((i == 4) && (x < -2147483648L || x > 2147483647L))
- RANGE_ERROR(state, x, f, 0, 0xffffffffL);
+ RANGE_ERROR(state, f, 0);
#endif
}
do {
@@ -935,14 +966,18 @@ bp_uint(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
unsigned long x;
Py_ssize_t i;
unsigned char *q = (unsigned char *)p;
- if (get_ulong(state, v, &x) < 0)
+ if (get_ulong(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 1);
+ }
return -1;
+ }
i = f->size;
if (i != SIZEOF_LONG) {
unsigned long maxint = 1;
maxint <<= (unsigned long)(i * 8);
if (x >= maxint)
- RANGE_ERROR(state, x, f, 1, maxint - 1);
+ RANGE_ERROR(state, f, 1);
}
do {
q[--i] = (unsigned char)(x & 0xffUL);
@@ -964,6 +999,14 @@ bp_longlong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
0, /* little_endian */
1 /* signed */);
Py_DECREF(v);
+ if (res == -1 && PyErr_Occurred()) {
+ PyErr_Format(state->StructError,
+ "'%c' format requires %lld <= number <= %lld",
+ f->format,
+ LLONG_MIN,
+ LLONG_MAX);
+ return -1;
+ }
return res;
}
@@ -980,6 +1023,13 @@ bp_ulonglong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f
0, /* little_endian */
0 /* signed */);
Py_DECREF(v);
+ if (res == -1 && PyErr_Occurred()) {
+ PyErr_Format(state->StructError,
+ "'%c' format requires 0 <= number <= %llu",
+ f->format,
+ ULLONG_MAX);
+ return -1;
+ }
return res;
}
@@ -1148,15 +1198,19 @@ lp_int(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
long x;
Py_ssize_t i;
unsigned char *q = (unsigned char *)p;
- if (get_long(state, v, &x) < 0)
+ if (get_long(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 0);
+ }
return -1;
+ }
i = f->size;
if (i != SIZEOF_LONG) {
if ((i == 2) && (x < -32768 || x > 32767))
- RANGE_ERROR(state, x, f, 0, 0xffffL);
+ RANGE_ERROR(state, f, 0);
#if (SIZEOF_LONG != 4)
else if ((i == 4) && (x < -2147483648L || x > 2147483647L))
- RANGE_ERROR(state, x, f, 0, 0xffffffffL);
+ RANGE_ERROR(state, f, 0);
#endif
}
do {
@@ -1172,14 +1226,18 @@ lp_uint(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
unsigned long x;
Py_ssize_t i;
unsigned char *q = (unsigned char *)p;
- if (get_ulong(state, v, &x) < 0)
+ if (get_ulong(state, v, &x) < 0) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ RANGE_ERROR(state, f, 1);
+ }
return -1;
+ }
i = f->size;
if (i != SIZEOF_LONG) {
unsigned long maxint = 1;
maxint <<= (unsigned long)(i * 8);
if (x >= maxint)
- RANGE_ERROR(state, x, f, 1, maxint - 1);
+ RANGE_ERROR(state, f, 1);
}
do {
*q++ = (unsigned char)(x & 0xffUL);
@@ -1201,6 +1259,14 @@ lp_longlong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f)
1, /* little_endian */
1 /* signed */);
Py_DECREF(v);
+ if (res == -1 && PyErr_Occurred()) {
+ PyErr_Format(state->StructError,
+ "'%c' format requires %lld <= number <= %lld",
+ f->format,
+ LLONG_MIN,
+ LLONG_MAX);
+ return -1;
+ }
return res;
}
@@ -1217,6 +1283,13 @@ lp_ulonglong(_structmodulestate *state, char *p, PyObject *v, const formatdef *f
1, /* little_endian */
0 /* signed */);
Py_DECREF(v);
+ if (res == -1 && PyErr_Occurred()) {
+ PyErr_Format(state->StructError,
+ "'%c' format requires 0 <= number <= %llu",
+ f->format,
+ ULLONG_MAX);
+ return -1;
+ }
return res;
}
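
With the generic "argument out of range" overflow message removed, out-of-range packing errors are now reported through the per-format range messages. A sketch (exact message wording is illustrative)::

    import struct

    for fmt, value in [("b", 300), ("h", 2**40), ("q", 2**70)]:
        try:
            struct.pack(fmt, value)
        except struct.error as exc:
            # Each case now reports a "format requires ..." range message,
            # including values that overflow the intermediate C type.
            print(fmt, "->", exc)
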
diff --git a/Modules/_testcapi/watchers.c b/Modules/_testcapi/watchers.c
index 608cd780d12a26..1d91c206f63092 100644
--- a/Modules/_testcapi/watchers.c
+++ b/Modules/_testcapi/watchers.c
@@ -2,6 +2,7 @@
#define Py_BUILD_CORE
#include "pycore_function.h" // FUNC_MAX_WATCHERS
+#include "pycore_code.h" // CODE_MAX_WATCHERS
// Test dict watching
static PyObject *g_dict_watch_events;
@@ -277,6 +278,135 @@ unwatch_type(PyObject *self, PyObject *args)
Py_RETURN_NONE;
}
+
+// Test code object watching
+
+#define NUM_CODE_WATCHERS 2
+static int num_code_object_created_events[NUM_CODE_WATCHERS] = {0, 0};
+static int num_code_object_destroyed_events[NUM_CODE_WATCHERS] = {0, 0};
+
+static int
+handle_code_object_event(int which_watcher, PyCodeEvent event, PyCodeObject *co) {
+ if (event == PY_CODE_EVENT_CREATE) {
+ num_code_object_created_events[which_watcher]++;
+ }
+ else if (event == PY_CODE_EVENT_DESTROY) {
+ num_code_object_destroyed_events[which_watcher]++;
+ }
+ else {
+ return -1;
+ }
+ return 0;
+}
+
+static int
+first_code_object_callback(PyCodeEvent event, PyCodeObject *co)
+{
+ return handle_code_object_event(0, event, co);
+}
+
+static int
+second_code_object_callback(PyCodeEvent event, PyCodeObject *co)
+{
+ return handle_code_object_event(1, event, co);
+}
+
+static int
+noop_code_event_handler(PyCodeEvent event, PyCodeObject *co)
+{
+ return 0;
+}
+
+static PyObject *
+add_code_watcher(PyObject *self, PyObject *which_watcher)
+{
+ int watcher_id;
+ assert(PyLong_Check(which_watcher));
+ long which_l = PyLong_AsLong(which_watcher);
+ if (which_l == 0) {
+ watcher_id = PyCode_AddWatcher(first_code_object_callback);
+ num_code_object_created_events[0] = 0;
+ num_code_object_destroyed_events[0] = 0;
+ }
+ else if (which_l == 1) {
+ watcher_id = PyCode_AddWatcher(second_code_object_callback);
+ num_code_object_created_events[1] = 0;
+ num_code_object_destroyed_events[1] = 0;
+ }
+ else {
+ return NULL;
+ }
+ if (watcher_id < 0) {
+ return NULL;
+ }
+ return PyLong_FromLong(watcher_id);
+}
+
+static PyObject *
+clear_code_watcher(PyObject *self, PyObject *watcher_id)
+{
+ assert(PyLong_Check(watcher_id));
+ long watcher_id_l = PyLong_AsLong(watcher_id);
+ if (PyCode_ClearWatcher(watcher_id_l) < 0) {
+ return NULL;
+ }
+    // reset the static event counters
+ if (watcher_id_l >= 0 && watcher_id_l < NUM_CODE_WATCHERS) {
+ num_code_object_created_events[watcher_id_l] = 0;
+ num_code_object_destroyed_events[watcher_id_l] = 0;
+ }
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+get_code_watcher_num_created_events(PyObject *self, PyObject *watcher_id)
+{
+ assert(PyLong_Check(watcher_id));
+ long watcher_id_l = PyLong_AsLong(watcher_id);
+ assert(watcher_id_l >= 0 && watcher_id_l < NUM_CODE_WATCHERS);
+ return PyLong_FromLong(num_code_object_created_events[watcher_id_l]);
+}
+
+static PyObject *
+get_code_watcher_num_destroyed_events(PyObject *self, PyObject *watcher_id)
+{
+ assert(PyLong_Check(watcher_id));
+ long watcher_id_l = PyLong_AsLong(watcher_id);
+ assert(watcher_id_l >= 0 && watcher_id_l < NUM_CODE_WATCHERS);
+ return PyLong_FromLong(num_code_object_destroyed_events[watcher_id_l]);
+}
+
+static PyObject *
+allocate_too_many_code_watchers(PyObject *self, PyObject *args)
+{
+ int watcher_ids[CODE_MAX_WATCHERS + 1];
+ int num_watchers = 0;
+ for (unsigned long i = 0; i < sizeof(watcher_ids) / sizeof(int); i++) {
+ int watcher_id = PyCode_AddWatcher(noop_code_event_handler);
+ if (watcher_id == -1) {
+ break;
+ }
+ watcher_ids[i] = watcher_id;
+ num_watchers++;
+ }
+ PyObject *type, *value, *traceback;
+ PyErr_Fetch(&type, &value, &traceback);
+ for (int i = 0; i < num_watchers; i++) {
+ if (PyCode_ClearWatcher(watcher_ids[i]) < 0) {
+ PyErr_WriteUnraisable(Py_None);
+ break;
+ }
+ }
+ if (type) {
+ PyErr_Restore(type, value, traceback);
+ return NULL;
+ }
+ else if (PyErr_Occurred()) {
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
// Test function watchers
#define NUM_FUNC_WATCHERS 2
@@ -509,6 +639,16 @@ static PyMethodDef test_methods[] = {
{"unwatch_type", unwatch_type, METH_VARARGS, NULL},
{"get_type_modified_events", get_type_modified_events, METH_NOARGS, NULL},
+ // Code object watchers.
+ {"add_code_watcher", add_code_watcher, METH_O, NULL},
+ {"clear_code_watcher", clear_code_watcher, METH_O, NULL},
+ {"get_code_watcher_num_created_events",
+ get_code_watcher_num_created_events, METH_O, NULL},
+ {"get_code_watcher_num_destroyed_events",
+ get_code_watcher_num_destroyed_events, METH_O, NULL},
+ {"allocate_too_many_code_watchers",
+ (PyCFunction) allocate_too_many_code_watchers, METH_NOARGS, NULL},
+
// Function watchers.
{"add_func_watcher", add_func_watcher, METH_O, NULL},
{"clear_func_watcher", clear_func_watcher, METH_O, NULL},
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 3617fafe9b4fdd..83eef73a875d9d 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -2589,6 +2589,91 @@ test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored))
}
+// Test Py_CLEAR() macro
+static PyObject*
+test_py_clear(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+ // simple case with a variable
+ PyObject *obj = PyList_New(0);
+ if (obj == NULL) {
+ return NULL;
+ }
+ Py_CLEAR(obj);
+ assert(obj == NULL);
+
+ // gh-98724: complex case, Py_CLEAR() argument has a side effect
+ PyObject* array[1];
+ array[0] = PyList_New(0);
+ if (array[0] == NULL) {
+ return NULL;
+ }
+
+ PyObject **p = array;
+ Py_CLEAR(*p++);
+ assert(array[0] == NULL);
+ assert(p == array + 1);
+
+ Py_RETURN_NONE;
+}
+
+
+// Test Py_SETREF() and Py_XSETREF() macros, similar to test_py_clear()
+static PyObject*
+test_py_setref(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+ // Py_SETREF() simple case with a variable
+ PyObject *obj = PyList_New(0);
+ if (obj == NULL) {
+ return NULL;
+ }
+ Py_SETREF(obj, NULL);
+ assert(obj == NULL);
+
+ // Py_XSETREF() simple case with a variable
+ PyObject *obj2 = PyList_New(0);
+ if (obj2 == NULL) {
+ return NULL;
+ }
+ Py_XSETREF(obj2, NULL);
+ assert(obj2 == NULL);
+ // test Py_XSETREF() when the argument is NULL
+ Py_XSETREF(obj2, NULL);
+ assert(obj2 == NULL);
+
+ // gh-98724: complex case, Py_SETREF() argument has a side effect
+ PyObject* array[1];
+ array[0] = PyList_New(0);
+ if (array[0] == NULL) {
+ return NULL;
+ }
+
+ PyObject **p = array;
+ Py_SETREF(*p++, NULL);
+ assert(array[0] == NULL);
+ assert(p == array + 1);
+
+ // gh-98724: complex case, Py_XSETREF() argument has a side effect
+ PyObject* array2[1];
+ array2[0] = PyList_New(0);
+ if (array2[0] == NULL) {
+ return NULL;
+ }
+
+ PyObject **p2 = array2;
+ Py_XSETREF(*p2++, NULL);
+ assert(array2[0] == NULL);
+ assert(p2 == array2 + 1);
+
+ // test Py_XSETREF() when the argument is NULL
+ p2 = array2;
+ Py_XSETREF(*p2++, NULL);
+ assert(array2[0] == NULL);
+ assert(p2 == array2 + 1);
+
+ Py_RETURN_NONE;
+}
+
+
#define TEST_REFCOUNT() \
do { \
PyObject *obj = PyList_New(0); \
@@ -3252,6 +3337,8 @@ static PyMethodDef TestMethods[] = {
{"pynumber_tobase", pynumber_tobase, METH_VARARGS},
{"without_gc", without_gc, METH_O},
{"test_set_type_size", test_set_type_size, METH_NOARGS},
+ {"test_py_clear", test_py_clear, METH_NOARGS},
+ {"test_py_setref", test_py_setref, METH_NOARGS},
{"test_refcount_macros", test_refcount_macros, METH_NOARGS},
{"test_refcount_funcs", test_refcount_funcs, METH_NOARGS},
{"test_py_is_macros", test_py_is_macros, METH_NOARGS},
diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c
index ec8b6d881124da..d323ca83dcffa7 100644
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -135,7 +135,7 @@ lock_acquire_parse_args(PyObject *args, PyObject *kwds,
*timeout = unset_timeout ;
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "|iO:acquire", kwlist,
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|pO:acquire", kwlist,
&blocking, &timeout_obj))
return -1;
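
The "|iO" to "|pO" change above switches the blocking argument of lock.acquire() to the p (predicate) format unit, which accepts any object and stores its truth value into a C int instead of requiring an integer. A minimal sketch of the unit in isolation (the function name is illustrative):

    #include <Python.h>

    static PyObject *
    parse_flag(PyObject *self, PyObject *args)
    {
        int flag = 1;   /* the p unit stores 0 or 1 into a plain C int */
        if (!PyArg_ParseTuple(args, "|p:parse_flag", &flag)) {
            return NULL;
        }
        return PyBool_FromLong(flag);
    }

The Argument Clinic changes in _tkinter.c and _winapi.c below are the declarative counterpart: plain `bool = False` makes Clinic generate the same truth-value conversion that `bool(accept={int})` previously restricted to integers.
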
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c
index 93d4474f65d62c..d4a129058702a2 100644
--- a/Modules/_tkinter.c
+++ b/Modules/_tkinter.c
@@ -2832,7 +2832,7 @@ Tkapp_WantObjects(PyObject *self, PyObject *args)
{
int wantobjects = -1;
- if (!PyArg_ParseTuple(args, "|i:wantobjects", &wantobjects))
+ if (!PyArg_ParseTuple(args, "|p:wantobjects", &wantobjects))
return NULL;
if (wantobjects == -1)
return PyBool_FromLong(((TkappObject*)self)->wantobjects);
@@ -2978,11 +2978,11 @@ _tkinter.create
screenName: str(accept={str, NoneType}) = None
baseName: str = ""
className: str = "Tk"
- interactive: bool(accept={int}) = False
- wantobjects: bool(accept={int}) = False
- wantTk: bool(accept={int}) = True
+ interactive: bool = False
+ wantobjects: bool = False
+ wantTk: bool = True
if false, then Tk_Init() doesn't get called
- sync: bool(accept={int}) = False
+ sync: bool = False
if true, then pass -sync to wish
use: str(accept={str, NoneType}) = None
if not None, then pass -use to wish
@@ -2995,7 +2995,7 @@ _tkinter_create_impl(PyObject *module, const char *screenName,
const char *baseName, const char *className,
int interactive, int wantobjects, int wantTk, int sync,
const char *use)
-/*[clinic end generated code: output=e3315607648e6bb4 input=da9b17ee7358d862]*/
+/*[clinic end generated code: output=e3315607648e6bb4 input=09afef9adea70a19]*/
{
/* XXX baseName is not used anymore;
* try getting rid of it. */
diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c
index 0d70f0cf34c8d6..ac16626f2101ba 100644
--- a/Modules/_tracemalloc.c
+++ b/Modules/_tracemalloc.c
@@ -20,9 +20,6 @@ module _tracemalloc
_Py_DECLARE_STR(anon_unknown, "");
-/* Trace memory blocks allocated by PyMem_RawMalloc() */
-#define TRACE_RAW_MALLOC
-
/* Forward declaration */
static void tracemalloc_stop(void);
static void* raw_malloc(size_t size);
@@ -35,19 +32,14 @@ static void raw_free(void *ptr);
#define TO_PTR(key) ((const void *)(uintptr_t)(key))
#define FROM_PTR(key) ((uintptr_t)(key))
-/* Protected by the GIL */
-static struct {
- PyMemAllocatorEx mem;
- PyMemAllocatorEx raw;
- PyMemAllocatorEx obj;
-} allocators;
+#define allocators _PyRuntime.tracemalloc.allocators
#if defined(TRACE_RAW_MALLOC)
/* This lock is needed because tracemalloc_free() is called without
the GIL held from PyMem_RawFree(). It cannot acquire the lock because it
would introduce a deadlock in _PyThreadState_DeleteCurrent(). */
-static PyThread_type_lock tables_lock;
+# define tables_lock _PyRuntime.tracemalloc.tables_lock
# define TABLES_LOCK() PyThread_acquire_lock(tables_lock, 1)
# define TABLES_UNLOCK() PyThread_release_lock(tables_lock)
#else
@@ -59,33 +51,8 @@ static PyThread_type_lock tables_lock;
#define DEFAULT_DOMAIN 0
-/* Pack the frame_t structure to reduce the memory footprint on 64-bit
- architectures: 12 bytes instead of 16. */
-typedef struct
-#ifdef __GNUC__
-__attribute__((packed))
-#elif defined(_MSC_VER)
-#pragma pack(push, 4)
-#endif
-{
- /* filename cannot be NULL: "" is used if the Python frame
- filename is NULL */
- PyObject *filename;
- unsigned int lineno;
-} frame_t;
-#ifdef _MSC_VER
-#pragma pack(pop)
-#endif
-
-
-typedef struct {
- Py_uhash_t hash;
- /* Number of frames stored */
- uint16_t nframe;
- /* Total number of frames the traceback had */
- uint16_t total_nframe;
- frame_t frames[1];
-} traceback_t;
+typedef struct tracemalloc_frame frame_t;
+typedef struct tracemalloc_traceback traceback_t;
#define TRACEBACK_SIZE(NFRAME) \
(sizeof(traceback_t) + sizeof(frame_t) * (NFRAME - 1))
@@ -96,7 +63,8 @@ typedef struct {
static const unsigned long MAX_NFRAME = Py_MIN(UINT16_MAX, ((SIZE_MAX - sizeof(traceback_t)) / sizeof(frame_t) + 1));
-static traceback_t tracemalloc_empty_traceback;
+#define tracemalloc_empty_traceback _PyRuntime.tracemalloc.empty_traceback
+
/* Trace of a memory block */
typedef struct {
@@ -108,35 +76,13 @@ typedef struct {
} trace_t;
-/* Size in bytes of currently traced memory.
- Protected by TABLES_LOCK(). */
-static size_t tracemalloc_traced_memory = 0;
-
-/* Peak size in bytes of traced memory.
- Protected by TABLES_LOCK(). */
-static size_t tracemalloc_peak_traced_memory = 0;
-
-/* Hash table used as a set to intern filenames:
- PyObject* => PyObject*.
- Protected by the GIL */
-static _Py_hashtable_t *tracemalloc_filenames = NULL;
-
-/* Buffer to store a new traceback in traceback_new().
- Protected by the GIL. */
-static traceback_t *tracemalloc_traceback = NULL;
-
-/* Hash table used as a set to intern tracebacks:
- traceback_t* => traceback_t*
- Protected by the GIL */
-static _Py_hashtable_t *tracemalloc_tracebacks = NULL;
-
-/* pointer (void*) => trace (trace_t*).
- Protected by TABLES_LOCK(). */
-static _Py_hashtable_t *tracemalloc_traces = NULL;
-
-/* domain (unsigned int) => traces (_Py_hashtable_t).
- Protected by TABLES_LOCK(). */
-static _Py_hashtable_t *tracemalloc_domains = NULL;
+#define tracemalloc_traced_memory _PyRuntime.tracemalloc.traced_memory
+#define tracemalloc_peak_traced_memory _PyRuntime.tracemalloc.peak_traced_memory
+#define tracemalloc_filenames _PyRuntime.tracemalloc.filenames
+#define tracemalloc_traceback _PyRuntime.tracemalloc.traceback
+#define tracemalloc_tracebacks _PyRuntime.tracemalloc.tracebacks
+#define tracemalloc_traces _PyRuntime.tracemalloc.traces
+#define tracemalloc_domains _PyRuntime.tracemalloc.domains
#ifdef TRACE_DEBUG
@@ -157,7 +103,7 @@ tracemalloc_error(const char *format, ...)
#if defined(TRACE_RAW_MALLOC)
#define REENTRANT_THREADLOCAL
-static Py_tss_t tracemalloc_reentrant_key = Py_tss_NEEDS_INIT;
+#define tracemalloc_reentrant_key _PyRuntime.tracemalloc.reentrant_key
/* Any non-NULL pointer can be used */
#define REENTRANT Py_True
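
The _tracemalloc.c hunks above move the file-static state into _PyRuntime.tracemalloc and keep the old identifiers valid through #define aliases, so the rest of the file compiles unchanged. A generic sketch of that aliasing pattern, with illustrative names rather than the real pycore_runtime layout:

    #include <Python.h>

    /* Illustrative names only -- not the real pycore_runtime layout. */
    struct demo_runtime_state {
        size_t traced_memory;
        PyThread_type_lock tables_lock;
    };

    static struct demo_runtime_state demo_runtime;

    /* Existing call sites keep using the old global names unchanged: */
    #define traced_memory demo_runtime.traced_memory
    #define tables_lock   demo_runtime.tables_lock
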
diff --git a/Modules/_winapi.c b/Modules/_winapi.c
index bb4514c36bc7d0..f4d982b15d402a 100644
--- a/Modules/_winapi.c
+++ b/Modules/_winapi.c
@@ -404,13 +404,13 @@ _winapi_CloseHandle_impl(PyObject *module, HANDLE handle)
_winapi.ConnectNamedPipe
handle: HANDLE
- overlapped as use_overlapped: bool(accept={int}) = False
+ overlapped as use_overlapped: bool = False
[clinic start generated code]*/
static PyObject *
_winapi_ConnectNamedPipe_impl(PyObject *module, HANDLE handle,
int use_overlapped)
-/*[clinic end generated code: output=335a0e7086800671 input=34f937c1c86e5e68]*/
+/*[clinic end generated code: output=335a0e7086800671 input=a80e56e8bd370e31]*/
{
BOOL success;
OverlappedObject *overlapped = NULL;
@@ -1576,13 +1576,13 @@ _winapi.ReadFile
handle: HANDLE
size: DWORD
- overlapped as use_overlapped: bool(accept={int}) = False
+ overlapped as use_overlapped: bool = False
[clinic start generated code]*/
static PyObject *
_winapi_ReadFile_impl(PyObject *module, HANDLE handle, DWORD size,
int use_overlapped)
-/*[clinic end generated code: output=d3d5b44a8201b944 input=08c439d03a11aac5]*/
+/*[clinic end generated code: output=d3d5b44a8201b944 input=4f82f8e909ad91ad]*/
{
DWORD nread;
PyObject *buf;
@@ -1862,13 +1862,13 @@ _winapi.WriteFile
handle: HANDLE
buffer: object
- overlapped as use_overlapped: bool(accept={int}) = False
+ overlapped as use_overlapped: bool = False
[clinic start generated code]*/
static PyObject *
_winapi_WriteFile_impl(PyObject *module, HANDLE handle, PyObject *buffer,
int use_overlapped)
-/*[clinic end generated code: output=2ca80f6bf3fa92e3 input=11eae2a03aa32731]*/
+/*[clinic end generated code: output=2ca80f6bf3fa92e3 input=2badb008c8a2e2a0]*/
{
Py_buffer _buf, *buf;
DWORD len, written;
diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c
index 2c9e0cda1ab048..0892fa3a9595e8 100644
--- a/Modules/_xxsubinterpretersmodule.c
+++ b/Modules/_xxsubinterpretersmodule.c
@@ -6,11 +6,15 @@
#endif
#include "Python.h"
+// XXX This module should not rely on internal API.
#include "pycore_frame.h"
#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_interpreteridobject.h"
+#define MODULE_NAME "_xxsubinterpreters"
+
+
static char *
_copy_raw_string(PyObject *strobj)
{
@@ -28,13 +32,186 @@ _copy_raw_string(PyObject *strobj)
}
static PyInterpreterState *
-_get_current(void)
+_get_current_interp(void)
{
// PyInterpreterState_Get() aborts if lookup fails, so don't need
// to check the result for NULL.
return PyInterpreterState_Get();
}
+static PyObject *
+_get_current_module(void)
+{
+ // We ensured it was imported in _run_script().
+ PyObject *name = PyUnicode_FromString(MODULE_NAME);
+ if (name == NULL) {
+ return NULL;
+ }
+ PyObject *mod = PyImport_GetModule(name);
+ Py_DECREF(name);
+ if (mod == NULL) {
+ return NULL;
+ }
+ assert(mod != Py_None);
+ return mod;
+}
+
+static PyObject *
+get_module_from_owned_type(PyTypeObject *cls)
+{
+ assert(cls != NULL);
+ return _get_current_module();
+ // XXX Use the more efficient API now that we use heap types:
+ //return PyType_GetModule(cls);
+}
+
+static struct PyModuleDef moduledef;
+
+static PyObject *
+get_module_from_type(PyTypeObject *cls)
+{
+ assert(cls != NULL);
+ return _get_current_module();
+ // XXX Use the more efficient API now that we use heap types:
+ //return PyType_GetModuleByDef(cls, &moduledef);
+}
+
+static PyObject *
+add_new_exception(PyObject *mod, const char *name, PyObject *base)
+{
+ assert(!PyObject_HasAttrString(mod, name));
+ PyObject *exctype = PyErr_NewException(name, base, NULL);
+ if (exctype == NULL) {
+ return NULL;
+ }
+ int res = PyModule_AddType(mod, (PyTypeObject *)exctype);
+ if (res < 0) {
+ Py_DECREF(exctype);
+ return NULL;
+ }
+ return exctype;
+}
+
+#define ADD_NEW_EXCEPTION(MOD, NAME, BASE) \
+ add_new_exception(MOD, MODULE_NAME "." Py_STRINGIFY(NAME), BASE)
+
+static PyTypeObject *
+add_new_type(PyObject *mod, PyType_Spec *spec, crossinterpdatafunc shared)
+{
+ PyTypeObject *cls = (PyTypeObject *)PyType_FromMetaclass(
+ NULL, mod, spec, NULL);
+ if (cls == NULL) {
+ return NULL;
+ }
+ if (PyModule_AddType(mod, cls) < 0) {
+ Py_DECREF(cls);
+ return NULL;
+ }
+ if (shared != NULL) {
+ if (_PyCrossInterpreterData_RegisterClass(cls, shared)) {
+ Py_DECREF(cls);
+ return NULL;
+ }
+ }
+ return cls;
+}
+
+static int
+_release_xid_data(_PyCrossInterpreterData *data, int ignoreexc)
+{
+ PyObject *exctype, *excval, *exctb;
+ if (ignoreexc) {
+ PyErr_Fetch(&exctype, &excval, &exctb);
+ }
+ int res = _PyCrossInterpreterData_Release(data);
+ if (res < 0) {
+ // XXX Fix this!
+ /* The owning interpreter is already destroyed.
+ * Ideally, this shouldn't ever happen. When an interpreter is
+ * about to be destroyed, we should clear out all of its objects
+ * from every channel associated with that interpreter.
+ * For now we hack around that to resolve refleaks, by decref'ing
+     * the released object here, even if it's the wrong interpreter.
+ * The owning interpreter has already been destroyed
+ * so we should be okay, especially since the currently
+ * shareable types are all very basic, with no GC.
+ * That said, it becomes much messier once interpreters
+ * no longer share a GIL, so this needs to be fixed before then. */
+ _PyCrossInterpreterData_Clear(NULL, data);
+ if (ignoreexc) {
+ // XXX Emit a warning?
+ PyErr_Clear();
+ }
+ }
+ if (ignoreexc) {
+ PyErr_Restore(exctype, excval, exctb);
+ }
+ return res;
+}
+
+
+/* module state *************************************************************/
+
+typedef struct {
+ PyTypeObject *ChannelIDType;
+
+ /* interpreter exceptions */
+ PyObject *RunFailedError;
+
+ /* channel exceptions */
+ PyObject *ChannelError;
+ PyObject *ChannelNotFoundError;
+ PyObject *ChannelClosedError;
+ PyObject *ChannelEmptyError;
+ PyObject *ChannelNotEmptyError;
+} module_state;
+
+static inline module_state *
+get_module_state(PyObject *mod)
+{
+ assert(mod != NULL);
+ module_state *state = PyModule_GetState(mod);
+ assert(state != NULL);
+ return state;
+}
+
+static int
+traverse_module_state(module_state *state, visitproc visit, void *arg)
+{
+ /* heap types */
+ Py_VISIT(state->ChannelIDType);
+
+ /* interpreter exceptions */
+ Py_VISIT(state->RunFailedError);
+
+ /* channel exceptions */
+ Py_VISIT(state->ChannelError);
+ Py_VISIT(state->ChannelNotFoundError);
+ Py_VISIT(state->ChannelClosedError);
+ Py_VISIT(state->ChannelEmptyError);
+ Py_VISIT(state->ChannelNotEmptyError);
+ return 0;
+}
+
+static int
+clear_module_state(module_state *state)
+{
+ /* heap types */
+ (void)_PyCrossInterpreterData_UnregisterClass(state->ChannelIDType);
+ Py_CLEAR(state->ChannelIDType);
+
+ /* interpreter exceptions */
+ Py_CLEAR(state->RunFailedError);
+
+ /* channel exceptions */
+ Py_CLEAR(state->ChannelError);
+ Py_CLEAR(state->ChannelNotFoundError);
+ Py_CLEAR(state->ChannelClosedError);
+ Py_CLEAR(state->ChannelEmptyError);
+ Py_CLEAR(state->ChannelNotEmptyError);
+ return 0;
+}
+
/* data-sharing-specific code ***********************************************/
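
The module_state struct and its traverse/clear helpers above take over what used to be C statics (the heap type plus the exception objects). The PyModuleDef wiring is not shown in this part of the diff, so the following is only a sketch, with illustrative names, of how such helpers are typically hooked up through m_size, m_traverse and m_clear:

    #include <Python.h>

    /* Illustrative names; not the module's real state layout. */
    typedef struct {
        PyObject *SomeError;
    } demo_state;

    static int
    demo_traverse(PyObject *mod, visitproc visit, void *arg)
    {
        demo_state *state = PyModule_GetState(mod);
        Py_VISIT(state->SomeError);
        return 0;
    }

    static int
    demo_clear(PyObject *mod)
    {
        demo_state *state = PyModule_GetState(mod);
        Py_CLEAR(state->SomeError);
        return 0;
    }

    static struct PyModuleDef demo_module = {
        PyModuleDef_HEAD_INIT,
        .m_name = "demo",
        .m_size = sizeof(demo_state),   /* per-module state allocated here */
        .m_traverse = demo_traverse,
        .m_clear = demo_clear,
    };
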
@@ -66,7 +243,7 @@ _sharednsitem_clear(struct _sharednsitem *item)
PyMem_Free(item->name);
item->name = NULL;
}
- _PyCrossInterpreterData_Release(&item->data);
+ (void)_release_xid_data(&item->data, 1);
}
static int
@@ -121,8 +298,10 @@ _sharedns_free(_sharedns *shared)
}
static _sharedns *
-_get_shared_ns(PyObject *shareable)
+_get_shared_ns(PyObject *shareable, PyTypeObject *channelidtype,
+ int *needs_import)
{
+ *needs_import = 0;
if (shareable == NULL || shareable == Py_None) {
return NULL;
}
@@ -144,6 +323,9 @@ _get_shared_ns(PyObject *shareable)
if (_sharednsitem_init(&shared->items[i], key, value) != 0) {
break;
}
+ if (Py_TYPE(value) == channelidtype) {
+ *needs_import = 1;
+ }
}
if (PyErr_Occurred()) {
_sharedns_free(shared);
@@ -287,68 +469,97 @@ _sharedexception_apply(_sharedexception *exc, PyObject *wrapperclass)
#define CHANNEL_BOTH 0
#define CHANNEL_RECV -1
-static PyObject *ChannelError;
-static PyObject *ChannelNotFoundError;
-static PyObject *ChannelClosedError;
-static PyObject *ChannelEmptyError;
-static PyObject *ChannelNotEmptyError;
+/* channel errors */
+
+#define ERR_CHANNEL_NOT_FOUND -2
+#define ERR_CHANNEL_CLOSED -3
+#define ERR_CHANNEL_INTERP_CLOSED -4
+#define ERR_CHANNEL_EMPTY -5
+#define ERR_CHANNEL_NOT_EMPTY -6
+#define ERR_CHANNEL_MUTEX_INIT -7
+#define ERR_CHANNELS_MUTEX_INIT -8
+#define ERR_NO_NEXT_CHANNEL_ID -9
static int
-channel_exceptions_init(PyObject *ns)
+channel_exceptions_init(PyObject *mod)
{
- // XXX Move the exceptions into per-module memory?
-
- // A channel-related operation failed.
- ChannelError = PyErr_NewException("_xxsubinterpreters.ChannelError",
- PyExc_RuntimeError, NULL);
- if (ChannelError == NULL) {
- return -1;
- }
- if (PyDict_SetItemString(ns, "ChannelError", ChannelError) != 0) {
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
return -1;
}
- // An operation tried to use a channel that doesn't exist.
- ChannelNotFoundError = PyErr_NewException(
- "_xxsubinterpreters.ChannelNotFoundError", ChannelError, NULL);
- if (ChannelNotFoundError == NULL) {
- return -1;
- }
- if (PyDict_SetItemString(ns, "ChannelNotFoundError", ChannelNotFoundError) != 0) {
- return -1;
- }
+#define ADD(NAME, BASE) \
+ do { \
+ assert(state->NAME == NULL); \
+ state->NAME = ADD_NEW_EXCEPTION(mod, NAME, BASE); \
+ if (state->NAME == NULL) { \
+ return -1; \
+ } \
+ } while (0)
+ // A channel-related operation failed.
+ ADD(ChannelError, PyExc_RuntimeError);
+ // An operation tried to use a channel that doesn't exist.
+ ADD(ChannelNotFoundError, state->ChannelError);
// An operation tried to use a closed channel.
- ChannelClosedError = PyErr_NewException(
- "_xxsubinterpreters.ChannelClosedError", ChannelError, NULL);
- if (ChannelClosedError == NULL) {
- return -1;
- }
- if (PyDict_SetItemString(ns, "ChannelClosedError", ChannelClosedError) != 0) {
- return -1;
- }
-
+ ADD(ChannelClosedError, state->ChannelError);
// An operation tried to pop from an empty channel.
- ChannelEmptyError = PyErr_NewException(
- "_xxsubinterpreters.ChannelEmptyError", ChannelError, NULL);
- if (ChannelEmptyError == NULL) {
- return -1;
+ ADD(ChannelEmptyError, state->ChannelError);
+ // An operation tried to close a non-empty channel.
+ ADD(ChannelNotEmptyError, state->ChannelError);
+#undef ADD
+
+ return 0;
+}
+
+static int
+handle_channel_error(int err, PyObject *mod, int64_t cid)
+{
+ if (err == 0) {
+ assert(!PyErr_Occurred());
+ return 0;
}
- if (PyDict_SetItemString(ns, "ChannelEmptyError", ChannelEmptyError) != 0) {
- return -1;
+ assert(err < 0);
+ module_state *state = get_module_state(mod);
+ assert(state != NULL);
+ if (err == ERR_CHANNEL_NOT_FOUND) {
+ PyErr_Format(state->ChannelNotFoundError,
+ "channel %" PRId64 " not found", cid);
+ }
+ else if (err == ERR_CHANNEL_CLOSED) {
+ PyErr_Format(state->ChannelClosedError,
+ "channel %" PRId64 " is closed", cid);
+ }
+ else if (err == ERR_CHANNEL_INTERP_CLOSED) {
+ PyErr_Format(state->ChannelClosedError,
+ "channel %" PRId64 " is already closed", cid);
+ }
+ else if (err == ERR_CHANNEL_EMPTY) {
+ PyErr_Format(state->ChannelEmptyError,
+ "channel %" PRId64 " is empty", cid);
+ }
+ else if (err == ERR_CHANNEL_NOT_EMPTY) {
+ PyErr_Format(state->ChannelNotEmptyError,
+ "channel %" PRId64 " may not be closed "
+ "if not empty (try force=True)",
+ cid);
+ }
+ else if (err == ERR_CHANNEL_MUTEX_INIT) {
+ PyErr_SetString(state->ChannelError,
+ "can't initialize mutex for new channel");
}
-
- // An operation tried to close a non-empty channel.
- ChannelNotEmptyError = PyErr_NewException(
- "_xxsubinterpreters.ChannelNotEmptyError", ChannelError, NULL);
- if (ChannelNotEmptyError == NULL) {
- return -1;
+ else if (err == ERR_CHANNELS_MUTEX_INIT) {
+ PyErr_SetString(state->ChannelError,
+ "can't initialize mutex for channel management");
}
- if (PyDict_SetItemString(ns, "ChannelNotEmptyError", ChannelNotEmptyError) != 0) {
- return -1;
+ else if (err == ERR_NO_NEXT_CHANNEL_ID) {
+ PyErr_SetString(state->ChannelError,
+ "failed to get a channel ID");
}
-
- return 0;
+ else {
+ assert(PyErr_Occurred());
+ }
+ return 1;
}
/* the channel queue */
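
handle_channel_error() above centralizes exception raising: the low-level channel routines now return negative ERR_CHANNEL_* codes instead of setting exceptions themselves, and the module-level entry points translate those codes once. A condensed sketch of that split, with illustrative names and PyExc_RuntimeError standing in for the module-state exception types:

    #include <Python.h>
    #include <inttypes.h>

    #define ERR_DEMO_NOT_FOUND -2

    /* Low level: no exception machinery, just a code (0 on success). */
    static int
    demo_lookup(int64_t id, void **out)
    {
        *out = NULL;
        return ERR_DEMO_NOT_FOUND;
    }

    /* Boundary: translate the code into an exception exactly once. */
    static int
    handle_demo_error(int err, int64_t id)
    {
        if (err == 0) {
            return 0;
        }
        if (err == ERR_DEMO_NOT_FOUND) {
            PyErr_Format(PyExc_RuntimeError,
                         "object %" PRId64 " not found", id);
        }
        return 1;
    }

channel_create(), channel_send(), channel_recv() and the other entry points below follow exactly this shape, passing the module object so handle_channel_error() can reach the per-module exception types.
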
@@ -377,7 +588,7 @@ static void
_channelitem_clear(_channelitem *item)
{
if (item->data != NULL) {
- _PyCrossInterpreterData_Release(item->data);
+ (void)_release_xid_data(item->data, 1);
PyMem_Free(item->data);
item->data = NULL;
}
@@ -621,8 +832,7 @@ _channelends_associate(_channelends *ends, int64_t interp, int send)
interp, &prev);
if (end != NULL) {
if (!end->open) {
- PyErr_SetString(ChannelClosedError, "channel already closed");
- return -1;
+ return ERR_CHANNEL_CLOSED;
}
// already associated
return 0;
@@ -721,19 +931,13 @@ typedef struct _channel {
} _PyChannelState;
static _PyChannelState *
-_channel_new(void)
+_channel_new(PyThread_type_lock mutex)
{
_PyChannelState *chan = PyMem_NEW(_PyChannelState, 1);
if (chan == NULL) {
return NULL;
}
- chan->mutex = PyThread_allocate_lock();
- if (chan->mutex == NULL) {
- PyMem_Free(chan);
- PyErr_SetString(ChannelError,
- "can't initialize mutex for new channel");
- return NULL;
- }
+ chan->mutex = mutex;
chan->queue = _channelqueue_new();
if (chan->queue == NULL) {
PyMem_Free(chan);
@@ -771,10 +975,11 @@ _channel_add(_PyChannelState *chan, int64_t interp,
PyThread_acquire_lock(chan->mutex, WAIT_LOCK);
if (!chan->open) {
- PyErr_SetString(ChannelClosedError, "channel closed");
+ res = ERR_CHANNEL_CLOSED;
goto done;
}
if (_channelends_associate(chan->ends, interp, 1) != 0) {
+ res = ERR_CHANNEL_INTERP_CLOSED;
goto done;
}
@@ -788,31 +993,34 @@ _channel_add(_PyChannelState *chan, int64_t interp,
return res;
}
-static _PyCrossInterpreterData *
-_channel_next(_PyChannelState *chan, int64_t interp)
+static int
+_channel_next(_PyChannelState *chan, int64_t interp,
+ _PyCrossInterpreterData **res)
{
- _PyCrossInterpreterData *data = NULL;
+ int err = 0;
PyThread_acquire_lock(chan->mutex, WAIT_LOCK);
if (!chan->open) {
- PyErr_SetString(ChannelClosedError, "channel closed");
+ err = ERR_CHANNEL_CLOSED;
goto done;
}
if (_channelends_associate(chan->ends, interp, 0) != 0) {
+ err = ERR_CHANNEL_INTERP_CLOSED;
goto done;
}
- data = _channelqueue_get(chan->queue);
+ _PyCrossInterpreterData *data = _channelqueue_get(chan->queue);
if (data == NULL && !PyErr_Occurred() && chan->closing != NULL) {
chan->open = 0;
}
+ *res = data;
done:
PyThread_release_lock(chan->mutex);
if (chan->queue->count == 0) {
_channel_finish_closing(chan);
}
- return data;
+ return err;
}
static int
@@ -822,7 +1030,7 @@ _channel_close_interpreter(_PyChannelState *chan, int64_t interp, int end)
int res = -1;
if (!chan->open) {
- PyErr_SetString(ChannelClosedError, "channel already closed");
+ res = ERR_CHANNEL_CLOSED;
goto done;
}
@@ -844,13 +1052,12 @@ _channel_close_all(_PyChannelState *chan, int end, int force)
PyThread_acquire_lock(chan->mutex, WAIT_LOCK);
if (!chan->open) {
- PyErr_SetString(ChannelClosedError, "channel already closed");
+ res = ERR_CHANNEL_CLOSED;
goto done;
}
if (!force && chan->queue->count > 0) {
- PyErr_SetString(ChannelNotEmptyError,
- "may not be closed if not empty (try force=True)");
+ res = ERR_CHANNEL_NOT_EMPTY;
goto done;
}
@@ -935,21 +1142,24 @@ typedef struct _channels {
int64_t next_id;
} _channels;
-static int
-_channels_init(_channels *channels)
+static void
+_channels_init(_channels *channels, PyThread_type_lock mutex)
{
- if (channels->mutex == NULL) {
- channels->mutex = PyThread_allocate_lock();
- if (channels->mutex == NULL) {
- PyErr_SetString(ChannelError,
- "can't initialize mutex for channel management");
- return -1;
- }
- }
+ channels->mutex = mutex;
channels->head = NULL;
channels->numopen = 0;
channels->next_id = 0;
- return 0;
+}
+
+static void
+_channels_fini(_channels *channels)
+{
+ assert(channels->numopen == 0);
+ assert(channels->head == NULL);
+ if (channels->mutex != NULL) {
+ PyThread_free_lock(channels->mutex);
+ channels->mutex = NULL;
+ }
}
static int64_t
@@ -958,17 +1168,17 @@ _channels_next_id(_channels *channels) // needs lock
int64_t id = channels->next_id;
if (id < 0) {
/* overflow */
- PyErr_SetString(ChannelError,
- "failed to get a channel ID");
return -1;
}
channels->next_id += 1;
return id;
}
-static _PyChannelState *
-_channels_lookup(_channels *channels, int64_t id, PyThread_type_lock *pmutex)
+static int
+_channels_lookup(_channels *channels, int64_t id, PyThread_type_lock *pmutex,
+ _PyChannelState **res)
{
+ int err = -1;
_PyChannelState *chan = NULL;
PyThread_acquire_lock(channels->mutex, WAIT_LOCK);
if (pmutex != NULL) {
@@ -977,11 +1187,11 @@ _channels_lookup(_channels *channels, int64_t id, PyThread_type_lock *pmutex)
_channelref *ref = _channelref_find(channels->head, id, NULL);
if (ref == NULL) {
- PyErr_Format(ChannelNotFoundError, "channel %" PRId64 " not found", id);
+ err = ERR_CHANNEL_NOT_FOUND;
goto done;
}
if (ref->chan == NULL || !ref->chan->open) {
- PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", id);
+ err = ERR_CHANNEL_CLOSED;
goto done;
}
@@ -991,11 +1201,14 @@ _channels_lookup(_channels *channels, int64_t id, PyThread_type_lock *pmutex)
}
chan = ref->chan;
+ err = 0;
+
done:
if (pmutex == NULL || *pmutex == NULL) {
PyThread_release_lock(channels->mutex);
}
- return chan;
+ *res = chan;
+ return err;
}
static int64_t
@@ -1007,6 +1220,7 @@ _channels_add(_channels *channels, _PyChannelState *chan)
// Create a new ref.
int64_t id = _channels_next_id(channels);
if (id < 0) {
+ cid = ERR_NO_NEXT_CHANNEL_ID;
goto done;
}
_channelref *ref = _channelref_new(id, chan);
@@ -1041,31 +1255,32 @@ _channels_close(_channels *channels, int64_t cid, _PyChannelState **pchan,
_channelref *ref = _channelref_find(channels->head, cid, NULL);
if (ref == NULL) {
- PyErr_Format(ChannelNotFoundError, "channel %" PRId64 " not found", cid);
+ res = ERR_CHANNEL_NOT_FOUND;
goto done;
}
if (ref->chan == NULL) {
- PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", cid);
+ res = ERR_CHANNEL_CLOSED;
goto done;
}
else if (!force && end == CHANNEL_SEND && ref->chan->closing != NULL) {
- PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", cid);
+ res = ERR_CHANNEL_CLOSED;
goto done;
}
else {
- if (_channel_close_all(ref->chan, end, force) != 0) {
- if (end == CHANNEL_SEND &&
- PyErr_ExceptionMatches(ChannelNotEmptyError)) {
+ int err = _channel_close_all(ref->chan, end, force);
+ if (err != 0) {
+ if (end == CHANNEL_SEND && err == ERR_CHANNEL_NOT_EMPTY) {
if (ref->chan->closing != NULL) {
- PyErr_Format(ChannelClosedError,
- "channel %" PRId64 " closed", cid);
+ res = ERR_CHANNEL_CLOSED;
goto done;
}
// Mark the channel as closing and return. The channel
// will be cleaned up in _channel_next().
PyErr_Clear();
- if (_channel_set_closing(ref, channels->mutex) != 0) {
+ int err = _channel_set_closing(ref, channels->mutex);
+ if (err != 0) {
+ res = err;
goto done;
}
if (pchan != NULL) {
@@ -1073,6 +1288,9 @@ _channels_close(_channels *channels, int64_t cid, _PyChannelState **pchan,
}
res = 0;
}
+ else {
+ res = err;
+ }
goto done;
}
if (pchan != NULL) {
@@ -1121,7 +1339,7 @@ _channels_remove(_channels *channels, int64_t id, _PyChannelState **pchan)
_channelref *prev = NULL;
_channelref *ref = _channelref_find(channels->head, id, &prev);
if (ref == NULL) {
- PyErr_Format(ChannelNotFoundError, "channel %" PRId64 " not found", id);
+ res = ERR_CHANNEL_NOT_FOUND;
goto done;
}
@@ -1141,7 +1359,7 @@ _channels_add_id_object(_channels *channels, int64_t id)
_channelref *ref = _channelref_find(channels->head, id, NULL);
if (ref == NULL) {
- PyErr_Format(ChannelNotFoundError, "channel %" PRId64 " not found", id);
+ res = ERR_CHANNEL_NOT_FOUND;
goto done;
}
ref->objcount += 1;
@@ -1215,7 +1433,7 @@ _channel_set_closing(struct _channelref *ref, PyThread_type_lock mutex) {
int res = -1;
PyThread_acquire_lock(chan->mutex, WAIT_LOCK);
if (chan->closing != NULL) {
- PyErr_SetString(ChannelClosedError, "channel closed");
+ res = ERR_CHANNEL_CLOSED;
goto done;
}
chan->closing = PyMem_NEW(struct _channel_closing, 1);
@@ -1258,14 +1476,18 @@ _channel_finish_closing(struct _channel *chan) {
static int64_t
_channel_create(_channels *channels)
{
- _PyChannelState *chan = _channel_new();
+ PyThread_type_lock mutex = PyThread_allocate_lock();
+ if (mutex == NULL) {
+ return ERR_CHANNEL_MUTEX_INIT;
+ }
+ _PyChannelState *chan = _channel_new(mutex);
if (chan == NULL) {
+ PyThread_free_lock(mutex);
return -1;
}
int64_t id = _channels_add(channels, chan);
if (id < 0) {
_channel_free(chan);
- return -1;
}
return id;
}
@@ -1274,8 +1496,9 @@ static int
_channel_destroy(_channels *channels, int64_t id)
{
_PyChannelState *chan = NULL;
- if (_channels_remove(channels, id, &chan) != 0) {
- return -1;
+ int err = _channels_remove(channels, id, &chan);
+ if (err != 0) {
+ return err;
}
if (chan != NULL) {
_channel_free(chan);
@@ -1286,23 +1509,24 @@ _channel_destroy(_channels *channels, int64_t id)
static int
_channel_send(_channels *channels, int64_t id, PyObject *obj)
{
- PyInterpreterState *interp = _get_current();
+ PyInterpreterState *interp = _get_current_interp();
if (interp == NULL) {
return -1;
}
// Look up the channel.
PyThread_type_lock mutex = NULL;
- _PyChannelState *chan = _channels_lookup(channels, id, &mutex);
- if (chan == NULL) {
- return -1;
+ _PyChannelState *chan = NULL;
+ int err = _channels_lookup(channels, id, &mutex, &chan);
+ if (err != 0) {
+ return err;
}
+ assert(chan != NULL);
// Past this point we are responsible for releasing the mutex.
if (chan->closing != NULL) {
- PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", id);
PyThread_release_lock(mutex);
- return -1;
+ return ERR_CHANNEL_CLOSED;
}
// Convert the object to cross-interpreter data.
@@ -1321,61 +1545,87 @@ _channel_send(_channels *channels, int64_t id, PyObject *obj)
int res = _channel_add(chan, PyInterpreterState_GetID(interp), data);
PyThread_release_lock(mutex);
if (res != 0) {
- _PyCrossInterpreterData_Release(data);
+ // We may chain an exception here:
+ (void)_release_xid_data(data, 0);
PyMem_Free(data);
- return -1;
+ return res;
}
return 0;
}
-static PyObject *
-_channel_recv(_channels *channels, int64_t id)
+static int
+_channel_recv(_channels *channels, int64_t id, PyObject **res)
{
- PyInterpreterState *interp = _get_current();
+ int err;
+ *res = NULL;
+
+ PyInterpreterState *interp = _get_current_interp();
if (interp == NULL) {
- return NULL;
+ // XXX Is this always an error?
+ if (PyErr_Occurred()) {
+ return -1;
+ }
+ return 0;
}
// Look up the channel.
PyThread_type_lock mutex = NULL;
- _PyChannelState *chan = _channels_lookup(channels, id, &mutex);
- if (chan == NULL) {
- return NULL;
+ _PyChannelState *chan = NULL;
+ err = _channels_lookup(channels, id, &mutex, &chan);
+ if (err != 0) {
+ return err;
}
+ assert(chan != NULL);
// Past this point we are responsible for releasing the mutex.
// Pop off the next item from the channel.
- _PyCrossInterpreterData *data = _channel_next(chan, PyInterpreterState_GetID(interp));
+ _PyCrossInterpreterData *data = NULL;
+ err = _channel_next(chan, PyInterpreterState_GetID(interp), &data);
PyThread_release_lock(mutex);
- if (data == NULL) {
- return NULL;
+ if (err != 0) {
+ return err;
+ }
+ else if (data == NULL) {
+ assert(!PyErr_Occurred());
+ return 0;
}
// Convert the data back to an object.
PyObject *obj = _PyCrossInterpreterData_NewObject(data);
- _PyCrossInterpreterData_Release(data);
- PyMem_Free(data);
if (obj == NULL) {
- return NULL;
+ assert(PyErr_Occurred());
+ (void)_release_xid_data(data, 1);
+ PyMem_Free(data);
+ return -1;
+ }
+ int release_res = _release_xid_data(data, 0);
+ PyMem_Free(data);
+ if (release_res < 0) {
+ // The source interpreter has been destroyed already.
+ assert(PyErr_Occurred());
+ Py_DECREF(obj);
+ return -1;
}
- return obj;
+ *res = obj;
+ return 0;
}
static int
_channel_drop(_channels *channels, int64_t id, int send, int recv)
{
- PyInterpreterState *interp = _get_current();
+ PyInterpreterState *interp = _get_current_interp();
if (interp == NULL) {
return -1;
}
// Look up the channel.
PyThread_type_lock mutex = NULL;
- _PyChannelState *chan = _channels_lookup(channels, id, &mutex);
- if (chan == NULL) {
- return -1;
+ _PyChannelState *chan = NULL;
+ int err = _channels_lookup(channels, id, &mutex, &chan);
+ if (err != 0) {
+ return err;
}
// Past this point we are responsible for releasing the mutex.
@@ -1395,12 +1645,13 @@ static int
_channel_is_associated(_channels *channels, int64_t cid, int64_t interp,
int send)
{
- _PyChannelState *chan = _channels_lookup(channels, cid, NULL);
- if (chan == NULL) {
- return -1;
- } else if (send && chan->closing != NULL) {
- PyErr_Format(ChannelClosedError, "channel %" PRId64 " closed", cid);
- return -1;
+ _PyChannelState *chan = NULL;
+ int err = _channels_lookup(channels, cid, NULL, &chan);
+ if (err != 0) {
+ return err;
+ }
+ else if (send && chan->closing != NULL) {
+ return ERR_CHANNEL_CLOSED;
}
_channelend *end = _channelend_find(send ? chan->ends->send : chan->ends->recv,
@@ -1411,8 +1662,6 @@ _channel_is_associated(_channels *channels, int64_t cid, int64_t interp,
/* ChannelID class */
-static PyTypeObject ChannelIDtype;
-
typedef struct channelid {
PyObject_HEAD
int64_t id;
@@ -1421,11 +1670,19 @@ typedef struct channelid {
_channels *channels;
} channelid;
+struct channel_id_converter_data {
+ PyObject *module;
+ int64_t cid;
+};
+
static int
channel_id_converter(PyObject *arg, void *ptr)
{
int64_t cid;
- if (PyObject_TypeCheck(arg, &ChannelIDtype)) {
+ struct channel_id_converter_data *data = ptr;
+ module_state *state = get_module_state(data->module);
+ assert(state != NULL);
+ if (PyObject_TypeCheck(arg, state->ChannelIDType)) {
cid = ((channelid *)arg)->id;
}
else if (PyIndex_Check(arg)) {
@@ -1445,51 +1702,62 @@ channel_id_converter(PyObject *arg, void *ptr)
Py_TYPE(arg)->tp_name);
return 0;
}
- *(int64_t *)ptr = cid;
+ data->cid = cid;
return 1;
}
-static channelid *
+static int
newchannelid(PyTypeObject *cls, int64_t cid, int end, _channels *channels,
- int force, int resolve)
+ int force, int resolve, channelid **res)
{
+ *res = NULL;
+
channelid *self = PyObject_New(channelid, cls);
if (self == NULL) {
- return NULL;
+ return -1;
}
self->id = cid;
self->end = end;
self->resolve = resolve;
self->channels = channels;
- if (_channels_add_id_object(channels, cid) != 0) {
- if (force && PyErr_ExceptionMatches(ChannelNotFoundError)) {
- PyErr_Clear();
+ int err = _channels_add_id_object(channels, cid);
+ if (err != 0) {
+ if (force && err == ERR_CHANNEL_NOT_FOUND) {
+ assert(!PyErr_Occurred());
}
else {
Py_DECREF((PyObject *)self);
- return NULL;
+ return err;
}
}
- return self;
+ *res = self;
+ return 0;
}
static _channels * _global_channels(void);
static PyObject *
-channelid_new(PyTypeObject *cls, PyObject *args, PyObject *kwds)
+_channelid_new(PyObject *mod, PyTypeObject *cls,
+ PyObject *args, PyObject *kwds)
{
static char *kwlist[] = {"id", "send", "recv", "force", "_resolve", NULL};
int64_t cid;
+ struct channel_id_converter_data cid_data = {
+ .module = mod,
+ };
int send = -1;
int recv = -1;
int force = 0;
int resolve = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
"O&|$pppp:ChannelID.__new__", kwlist,
- channel_id_converter, &cid, &send, &recv, &force, &resolve))
+ channel_id_converter, &cid_data,
+ &send, &recv, &force, &resolve)) {
return NULL;
+ }
+ cid = cid_data.cid;
// Handle "send" and "recv".
if (send == 0 && recv == 0) {
@@ -1508,16 +1776,33 @@ channelid_new(PyTypeObject *cls, PyObject *args, PyObject *kwds)
end = CHANNEL_RECV;
}
- return (PyObject *)newchannelid(cls, cid, end, _global_channels(),
- force, resolve);
+ PyObject *id = NULL;
+ int err = newchannelid(cls, cid, end, _global_channels(),
+ force, resolve,
+ (channelid **)&id);
+ if (handle_channel_error(err, mod, cid)) {
+ assert(id == NULL);
+ return NULL;
+ }
+ assert(id != NULL);
+ return id;
}
static void
-channelid_dealloc(PyObject *v)
+channelid_dealloc(PyObject *self)
{
- int64_t cid = ((channelid *)v)->id;
- _channels *channels = ((channelid *)v)->channels;
- Py_TYPE(v)->tp_free(v);
+ int64_t cid = ((channelid *)self)->id;
+ _channels *channels = ((channelid *)self)->channels;
+
+ PyTypeObject *tp = Py_TYPE(self);
+ tp->tp_free(self);
+ /* "Instances of heap-allocated types hold a reference to their type."
+ * See: https://docs.python.org/3.11/howto/isolating-extensions.html#garbage-collection-protocol
+ * See: https://docs.python.org/3.11/c-api/typeobj.html#c.PyTypeObject.tp_traverse
+ */
+ // XXX Why don't we implement Py_TPFLAGS_HAVE_GC, e.g. Py_tp_traverse,
+ // like we do for _abc._abc_data?
+ Py_DECREF(tp);
_channels_drop_id_object(channels, cid);
}
@@ -1556,46 +1841,6 @@ channelid_int(PyObject *self)
return PyLong_FromLongLong(cid->id);
}
-static PyNumberMethods channelid_as_number = {
- 0, /* nb_add */
- 0, /* nb_subtract */
- 0, /* nb_multiply */
- 0, /* nb_remainder */
- 0, /* nb_divmod */
- 0, /* nb_power */
- 0, /* nb_negative */
- 0, /* nb_positive */
- 0, /* nb_absolute */
- 0, /* nb_bool */
- 0, /* nb_invert */
- 0, /* nb_lshift */
- 0, /* nb_rshift */
- 0, /* nb_and */
- 0, /* nb_xor */
- 0, /* nb_or */
- (unaryfunc)channelid_int, /* nb_int */
- 0, /* nb_reserved */
- 0, /* nb_float */
-
- 0, /* nb_inplace_add */
- 0, /* nb_inplace_subtract */
- 0, /* nb_inplace_multiply */
- 0, /* nb_inplace_remainder */
- 0, /* nb_inplace_power */
- 0, /* nb_inplace_lshift */
- 0, /* nb_inplace_rshift */
- 0, /* nb_inplace_and */
- 0, /* nb_inplace_xor */
- 0, /* nb_inplace_or */
-
- 0, /* nb_floor_divide */
- 0, /* nb_true_divide */
- 0, /* nb_inplace_floor_divide */
- 0, /* nb_inplace_true_divide */
-
- (unaryfunc)channelid_int, /* nb_index */
-};
-
static Py_hash_t
channelid_hash(PyObject *self)
{
@@ -1612,17 +1857,28 @@ channelid_hash(PyObject *self)
static PyObject *
channelid_richcompare(PyObject *self, PyObject *other, int op)
{
+ PyObject *res = NULL;
if (op != Py_EQ && op != Py_NE) {
Py_RETURN_NOTIMPLEMENTED;
}
- if (!PyObject_TypeCheck(self, &ChannelIDtype)) {
- Py_RETURN_NOTIMPLEMENTED;
+ PyObject *mod = get_module_from_type(Py_TYPE(self));
+ if (mod == NULL) {
+ return NULL;
+ }
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ goto done;
+ }
+
+ if (!PyObject_TypeCheck(self, state->ChannelIDType)) {
+ res = Py_NewRef(Py_NotImplemented);
+ goto done;
}
channelid *cid = (channelid *)self;
int equal;
- if (PyObject_TypeCheck(other, &ChannelIDtype)) {
+ if (PyObject_TypeCheck(other, state->ChannelIDType)) {
channelid *othercid = (channelid *)other;
equal = (cid->end == othercid->end) && (cid->id == othercid->id);
}
@@ -1631,27 +1887,34 @@ channelid_richcompare(PyObject *self, PyObject *other, int op)
int overflow;
long long othercid = PyLong_AsLongLongAndOverflow(other, &overflow);
if (othercid == -1 && PyErr_Occurred()) {
- return NULL;
+ goto done;
}
equal = !overflow && (othercid >= 0) && (cid->id == othercid);
}
else if (PyNumber_Check(other)) {
PyObject *pyid = PyLong_FromLongLong(cid->id);
if (pyid == NULL) {
- return NULL;
+ goto done;
}
- PyObject *res = PyObject_RichCompare(pyid, other, op);
+ res = PyObject_RichCompare(pyid, other, op);
Py_DECREF(pyid);
- return res;
+ goto done;
}
else {
- Py_RETURN_NOTIMPLEMENTED;
+ res = Py_NewRef(Py_NotImplemented);
+ goto done;
}
if ((op == Py_EQ && equal) || (op == Py_NE && !equal)) {
- Py_RETURN_TRUE;
+ res = Py_NewRef(Py_True);
}
- Py_RETURN_FALSE;
+ else {
+ res = Py_NewRef(Py_False);
+ }
+
+done:
+ Py_DECREF(mod);
+ return res;
}
static PyObject *
@@ -1690,41 +1953,63 @@ static PyObject *
_channelid_from_xid(_PyCrossInterpreterData *data)
{
struct _channelid_xid *xid = (struct _channelid_xid *)data->data;
+
+ PyObject *mod = _get_current_module();
+ if (mod == NULL) {
+ return NULL;
+ }
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ return NULL;
+ }
+
// Note that we do not preserve the "resolve" flag.
- PyObject *cid = (PyObject *)newchannelid(&ChannelIDtype, xid->id, xid->end,
- _global_channels(), 0, 0);
+ PyObject *cid = NULL;
+ int err = newchannelid(state->ChannelIDType, xid->id, xid->end,
+ _global_channels(), 0, 0,
+ (channelid **)&cid);
+ if (err != 0) {
+ assert(cid == NULL);
+ (void)handle_channel_error(err, mod, xid->id);
+ goto done;
+ }
+ assert(cid != NULL);
if (xid->end == 0) {
- return cid;
+ goto done;
}
if (!xid->resolve) {
- return cid;
+ goto done;
}
/* Try returning a high-level channel end but fall back to the ID. */
PyObject *chan = _channel_from_cid(cid, xid->end);
if (chan == NULL) {
PyErr_Clear();
- return cid;
+ goto done;
}
Py_DECREF(cid);
- return chan;
+ cid = chan;
+
+done:
+ Py_DECREF(mod);
+ return cid;
}
static int
-_channelid_shared(PyObject *obj, _PyCrossInterpreterData *data)
-{
- struct _channelid_xid *xid = PyMem_NEW(struct _channelid_xid, 1);
- if (xid == NULL) {
+_channelid_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
+{
+ if (_PyCrossInterpreterData_InitWithSize(
+ data, tstate->interp, sizeof(struct _channelid_xid), obj,
+ _channelid_from_xid
+ ) < 0)
+ {
return -1;
}
+ struct _channelid_xid *xid = (struct _channelid_xid *)data->data;
xid->id = ((channelid *)obj)->id;
xid->end = ((channelid *)obj)->end;
xid->resolve = ((channelid *)obj)->resolve;
-
- data->data = xid;
- data->obj = Py_NewRef(obj);
- data->new_object = _channelid_from_xid;
- data->free = PyMem_Free;
return 0;
}
@@ -1734,8 +2019,22 @@ channelid_end(PyObject *self, void *end)
int force = 1;
channelid *cid = (channelid *)self;
if (end != NULL) {
- return (PyObject *)newchannelid(Py_TYPE(self), cid->id, *(int *)end,
- cid->channels, force, cid->resolve);
+ PyObject *id = NULL;
+ int err = newchannelid(Py_TYPE(self), cid->id, *(int *)end,
+ cid->channels, force, cid->resolve,
+ (channelid **)&id);
+ if (err != 0) {
+ assert(id == NULL);
+ PyObject *mod = get_module_from_type(Py_TYPE(self));
+ if (mod == NULL) {
+ return NULL;
+ }
+ (void)handle_channel_error(err, mod, cid->id);
+ Py_DECREF(mod);
+ return NULL;
+ }
+ assert(id != NULL);
+ return id;
}
if (cid->end == CHANNEL_SEND) {
@@ -1763,66 +2062,52 @@ static PyGetSetDef channelid_getsets[] = {
PyDoc_STRVAR(channelid_doc,
"A channel ID identifies a channel and may be used as an int.");
-static PyTypeObject ChannelIDtype = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- "_xxsubinterpreters.ChannelID", /* tp_name */
- sizeof(channelid), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)channelid_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- (reprfunc)channelid_repr, /* tp_repr */
- &channelid_as_number, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- channelid_hash, /* tp_hash */
- 0, /* tp_call */
- (reprfunc)channelid_str, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- // Use Py_TPFLAGS_DISALLOW_INSTANTIATION so the type cannot be instantiated
- // from Python code. We do this because there is a strong relationship
- // between channel IDs and the channel lifecycle, so this limitation avoids
- // related complications. Use the _channel_id() function instead.
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE
- | Py_TPFLAGS_DISALLOW_INSTANTIATION, /* tp_flags */
- channelid_doc, /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- channelid_richcompare, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- channelid_getsets, /* tp_getset */
+static PyType_Slot ChannelIDType_slots[] = {
+ {Py_tp_dealloc, (destructor)channelid_dealloc},
+ {Py_tp_doc, (void *)channelid_doc},
+ {Py_tp_repr, (reprfunc)channelid_repr},
+ {Py_tp_str, (reprfunc)channelid_str},
+ {Py_tp_hash, channelid_hash},
+ {Py_tp_richcompare, channelid_richcompare},
+ {Py_tp_getset, channelid_getsets},
+ // number slots
+ {Py_nb_int, (unaryfunc)channelid_int},
+ {Py_nb_index, (unaryfunc)channelid_int},
+ {0, NULL},
};
+static PyType_Spec ChannelIDType_spec = {
+ .name = "_xxsubinterpreters.ChannelID",
+ .basicsize = sizeof(channelid),
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE |
+ Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_IMMUTABLETYPE),
+ .slots = ChannelIDType_slots,
+};
-/* interpreter-specific code ************************************************/
-static PyObject * RunFailedError = NULL;
+/* interpreter-specific code ************************************************/
static int
-interp_exceptions_init(PyObject *ns)
+interp_exceptions_init(PyObject *mod)
{
- // XXX Move the exceptions into per-module memory?
-
- if (RunFailedError == NULL) {
- // An uncaught exception came out of interp_run_string().
- RunFailedError = PyErr_NewException("_xxsubinterpreters.RunFailedError",
- PyExc_RuntimeError, NULL);
- if (RunFailedError == NULL) {
- return -1;
- }
- if (PyDict_SetItemString(ns, "RunFailedError", RunFailedError) != 0) {
- return -1;
- }
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ return -1;
}
+#define ADD(NAME, BASE) \
+ do { \
+ assert(state->NAME == NULL); \
+ state->NAME = ADD_NEW_EXCEPTION(mod, NAME, BASE); \
+ if (state->NAME == NULL) { \
+ return -1; \
+ } \
+ } while (0)
+
+ // An uncaught exception came out of interp_run_string().
+ ADD(RunFailedError, PyExc_RuntimeError);
+#undef ADD
+
return 0;
}
@@ -1860,12 +2145,24 @@ _ensure_not_running(PyInterpreterState *interp)
static int
_run_script(PyInterpreterState *interp, const char *codestr,
- _sharedns *shared, _sharedexception **exc)
+ _sharedns *shared, int needs_import,
+ _sharedexception **exc)
{
PyObject *exctype = NULL;
PyObject *excval = NULL;
PyObject *tb = NULL;
+ if (needs_import) {
+ // It might not have been imported yet in the current interpreter.
+ // However, it will (almost) always have been imported already
+ // in the main interpreter.
+ PyObject *mod = PyImport_ImportModule(MODULE_NAME);
+ if (mod == NULL) {
+ goto error;
+ }
+ Py_DECREF(mod);
+ }
+
PyObject *main_mod = _PyInterpreterState_GetMainModule(interp);
if (main_mod == NULL) {
goto error;
@@ -1918,14 +2215,17 @@ _run_script(PyInterpreterState *interp, const char *codestr,
}
static int
-_run_script_in_interpreter(PyInterpreterState *interp, const char *codestr,
- PyObject *shareables)
+_run_script_in_interpreter(PyObject *mod, PyInterpreterState *interp,
+ const char *codestr, PyObject *shareables)
{
if (_ensure_not_running(interp) < 0) {
return -1;
}
+ module_state *state = get_module_state(mod);
- _sharedns *shared = _get_shared_ns(shareables);
+ int needs_import = 0;
+ _sharedns *shared = _get_shared_ns(shareables, state->ChannelIDType,
+ &needs_import);
if (shared == NULL && PyErr_Occurred()) {
return -1;
}
@@ -1941,7 +2241,7 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr,
// Run the script.
_sharedexception *exc = NULL;
- int result = _run_script(interp, codestr, shared, &exc);
+ int result = _run_script(interp, codestr, shared, needs_import, &exc);
// Switch back.
if (save_tstate != NULL) {
@@ -1950,7 +2250,8 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr,
// Propagate any exception out to the caller.
if (exc != NULL) {
- _sharedexception_apply(exc, RunFailedError);
+ assert(state != NULL);
+ _sharedexception_apply(exc, state->RunFailedError);
_sharedexception_free(exc);
}
else if (result != 0) {
@@ -1972,18 +2273,41 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr,
the data that we need to share between interpreters, so it cannot
hold PyObject values. */
static struct globals {
+ int module_count;
_channels channels;
-} _globals = {{0}};
+} _globals = {0};
static int
-_init_globals(void)
+_globals_init(void)
{
- if (_channels_init(&_globals.channels) != 0) {
- return -1;
+ // XXX This isn't thread-safe.
+ _globals.module_count++;
+ if (_globals.module_count > 1) {
+ // Already initialized.
+ return 0;
}
+
+ assert(_globals.channels.mutex == NULL);
+ PyThread_type_lock mutex = PyThread_allocate_lock();
+ if (mutex == NULL) {
+ return ERR_CHANNELS_MUTEX_INIT;
+ }
+ _channels_init(&_globals.channels, mutex);
return 0;
}
+static void
+_globals_fini(void)
+{
+ // XXX This isn't thread-safe.
+ _globals.module_count--;
+ if (_globals.module_count > 0) {
+ return;
+ }
+
+ _channels_fini(&_globals.channels);
+}
+
static _channels *
_global_channels(void) {
return &_globals.channels;
@@ -2052,7 +2376,7 @@ interp_destroy(PyObject *self, PyObject *args, PyObject *kwds)
}
// Ensure we don't try to destroy the current interpreter.
- PyInterpreterState *current = _get_current();
+ PyInterpreterState *current = _get_current_interp();
if (current == NULL) {
return NULL;
}
@@ -2129,7 +2453,7 @@ Return a list containing the ID of every existing interpreter.");
static PyObject *
interp_get_current(PyObject *self, PyObject *Py_UNUSED(ignored))
{
- PyInterpreterState *interp =_get_current();
+    PyInterpreterState *interp = _get_current_interp();
if (interp == NULL) {
return NULL;
}
@@ -2187,7 +2511,7 @@ interp_run_string(PyObject *self, PyObject *args, PyObject *kwds)
}
// Run the code in the interpreter.
- if (_run_script_in_interpreter(interp, codestr, shared) != 0) {
+ if (_run_script_in_interpreter(self, interp, codestr, shared) != 0) {
return NULL;
}
Py_RETURN_NONE;
@@ -2259,16 +2583,26 @@ channel_create(PyObject *self, PyObject *Py_UNUSED(ignored))
{
int64_t cid = _channel_create(&_globals.channels);
if (cid < 0) {
+ (void)handle_channel_error(-1, self, cid);
return NULL;
}
- PyObject *id = (PyObject *)newchannelid(&ChannelIDtype, cid, 0,
- &_globals.channels, 0, 0);
- if (id == NULL) {
- if (_channel_destroy(&_globals.channels, cid) != 0) {
+ module_state *state = get_module_state(self);
+ if (state == NULL) {
+ return NULL;
+ }
+ PyObject *id = NULL;
+ int err = newchannelid(state->ChannelIDType, cid, 0,
+ &_globals.channels, 0, 0,
+ (channelid **)&id);
+ if (handle_channel_error(err, self, cid)) {
+ assert(id == NULL);
+ err = _channel_destroy(&_globals.channels, cid);
+ if (handle_channel_error(err, self, cid)) {
// XXX issue a warning?
}
return NULL;
}
+ assert(id != NULL);
assert(((channelid *)id)->channels != NULL);
return id;
}
@@ -2283,12 +2617,17 @@ channel_destroy(PyObject *self, PyObject *args, PyObject *kwds)
{
static char *kwlist[] = {"cid", NULL};
int64_t cid;
+ struct channel_id_converter_data cid_data = {
+ .module = self,
+ };
if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&:channel_destroy", kwlist,
- channel_id_converter, &cid)) {
+ channel_id_converter, &cid_data)) {
return NULL;
}
+ cid = cid_data.cid;
- if (_channel_destroy(&_globals.channels, cid) != 0) {
+ int err = _channel_destroy(&_globals.channels, cid);
+ if (handle_channel_error(err, self, cid)) {
return NULL;
}
Py_RETURN_NONE;
@@ -2315,14 +2654,24 @@ channel_list_all(PyObject *self, PyObject *Py_UNUSED(ignored))
if (ids == NULL) {
goto finally;
}
+ module_state *state = get_module_state(self);
+ if (state == NULL) {
+ Py_DECREF(ids);
+ ids = NULL;
+ goto finally;
+ }
int64_t *cur = cids;
for (int64_t i=0; i < count; cur++, i++) {
- PyObject *id = (PyObject *)newchannelid(&ChannelIDtype, *cur, 0,
- &_globals.channels, 0, 0);
- if (id == NULL) {
+ PyObject *id = NULL;
+ int err = newchannelid(state->ChannelIDType, *cur, 0,
+ &_globals.channels, 0, 0,
+ (channelid **)&id);
+ if (handle_channel_error(err, self, *cur)) {
+ assert(id == NULL);
Py_SETREF(ids, NULL);
break;
}
+ assert(id != NULL);
PyList_SET_ITEM(ids, (Py_ssize_t)i, id);
}
@@ -2341,6 +2690,9 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds)
{
static char *kwlist[] = {"cid", "send", NULL};
int64_t cid; /* Channel ID */
+ struct channel_id_converter_data cid_data = {
+ .module = self,
+ };
int send = 0; /* Send or receive end? */
int64_t id;
PyObject *ids, *id_obj;
@@ -2348,9 +2700,10 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds)
if (!PyArg_ParseTupleAndKeywords(
args, kwds, "O&$p:channel_list_interpreters",
- kwlist, channel_id_converter, &cid, &send)) {
+ kwlist, channel_id_converter, &cid_data, &send)) {
return NULL;
}
+ cid = cid_data.cid;
ids = PyList_New(0);
if (ids == NULL) {
@@ -2363,6 +2716,7 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds)
assert(id >= 0);
int res = _channel_is_associated(&_globals.channels, cid, id, send);
if (res < 0) {
+ (void)handle_channel_error(res, self, cid);
goto except;
}
if (res) {
@@ -2402,13 +2756,18 @@ channel_send(PyObject *self, PyObject *args, PyObject *kwds)
{
static char *kwlist[] = {"cid", "obj", NULL};
int64_t cid;
+ struct channel_id_converter_data cid_data = {
+ .module = self,
+ };
PyObject *obj;
if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&O:channel_send", kwlist,
- channel_id_converter, &cid, &obj)) {
+ channel_id_converter, &cid_data, &obj)) {
return NULL;
}
+ cid = cid_data.cid;
- if (_channel_send(&_globals.channels, cid, obj) != 0) {
+ int err = _channel_send(&_globals.channels, cid, obj);
+ if (handle_channel_error(err, self, cid)) {
return NULL;
}
Py_RETURN_NONE;
@@ -2424,26 +2783,32 @@ channel_recv(PyObject *self, PyObject *args, PyObject *kwds)
{
static char *kwlist[] = {"cid", "default", NULL};
int64_t cid;
+ struct channel_id_converter_data cid_data = {
+ .module = self,
+ };
PyObject *dflt = NULL;
if (!PyArg_ParseTupleAndKeywords(args, kwds, "O&|O:channel_recv", kwlist,
- channel_id_converter, &cid, &dflt)) {
+ channel_id_converter, &cid_data, &dflt)) {
return NULL;
}
- Py_XINCREF(dflt);
+ cid = cid_data.cid;
- PyObject *obj = _channel_recv(&_globals.channels, cid);
- if (obj != NULL) {
- Py_XDECREF(dflt);
- return obj;
- } else if (PyErr_Occurred()) {
- Py_XDECREF(dflt);
- return NULL;
- } else if (dflt != NULL) {
- return dflt;
- } else {
- PyErr_Format(ChannelEmptyError, "channel %" PRId64 " is empty", cid);
+ PyObject *obj = NULL;
+ int err = _channel_recv(&_globals.channels, cid, &obj);
+ if (handle_channel_error(err, self, cid)) {
return NULL;
}
+ Py_XINCREF(dflt);
+ if (obj == NULL) {
+ // Use the default.
+ if (dflt == NULL) {
+ (void)handle_channel_error(ERR_CHANNEL_EMPTY, self, cid);
+ return NULL;
+ }
+ obj = Py_NewRef(dflt);
+ }
+ Py_XDECREF(dflt);
+ return obj;
}
PyDoc_STRVAR(channel_recv_doc,
@@ -2459,16 +2824,22 @@ channel_close(PyObject *self, PyObject *args, PyObject *kwds)
{
static char *kwlist[] = {"cid", "send", "recv", "force", NULL};
int64_t cid;
+ struct channel_id_converter_data cid_data = {
+ .module = self,
+ };
int send = 0;
int recv = 0;
int force = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
"O&|$ppp:channel_close", kwlist,
- channel_id_converter, &cid, &send, &recv, &force)) {
+ channel_id_converter, &cid_data,
+ &send, &recv, &force)) {
return NULL;
}
+ cid = cid_data.cid;
- if (_channel_close(&_globals.channels, cid, send-recv, force) != 0) {
+ int err = _channel_close(&_globals.channels, cid, send-recv, force);
+ if (handle_channel_error(err, self, cid)) {
return NULL;
}
Py_RETURN_NONE;
@@ -2507,14 +2878,19 @@ channel_release(PyObject *self, PyObject *args, PyObject *kwds)
// Note that only the current interpreter is affected.
static char *kwlist[] = {"cid", "send", "recv", "force", NULL};
int64_t cid;
+ struct channel_id_converter_data cid_data = {
+ .module = self,
+ };
int send = 0;
int recv = 0;
int force = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
"O&|$ppp:channel_release", kwlist,
- channel_id_converter, &cid, &send, &recv, &force)) {
+ channel_id_converter, &cid_data,
+ &send, &recv, &force)) {
return NULL;
}
+ cid = cid_data.cid;
if (send == 0 && recv == 0) {
send = 1;
recv = 1;
@@ -2523,7 +2899,8 @@ channel_release(PyObject *self, PyObject *args, PyObject *kwds)
// XXX Handle force is True.
// XXX Fix implicit release.
- if (_channel_drop(&_globals.channels, cid, send, recv) != 0) {
+ int err = _channel_drop(&_globals.channels, cid, send, recv);
+ if (handle_channel_error(err, self, cid)) {
return NULL;
}
Py_RETURN_NONE;
@@ -2539,7 +2916,18 @@ ends are closed. Closing an already closed end is a noop.");
static PyObject *
channel__channel_id(PyObject *self, PyObject *args, PyObject *kwds)
{
- return channelid_new(&ChannelIDtype, args, kwds);
+ module_state *state = get_module_state(self);
+ if (state == NULL) {
+ return NULL;
+ }
+ PyTypeObject *cls = state->ChannelIDType;
+ PyObject *mod = get_module_from_owned_type(cls);
+ if (mod == NULL) {
+ return NULL;
+ }
+ PyObject *cid = _channelid_new(mod, cls, args, kwds);
+ Py_DECREF(mod);
+ return cid;
}
static PyMethodDef module_functions[] = {
@@ -2590,59 +2978,94 @@ PyDoc_STRVAR(module_doc,
"This module provides primitive operations to manage Python interpreters.\n\
The 'interpreters' module provides a more convenient interface.");
-static struct PyModuleDef interpretersmodule = {
- PyModuleDef_HEAD_INIT,
- "_xxsubinterpreters", /* m_name */
- module_doc, /* m_doc */
- -1, /* m_size */
- module_functions, /* m_methods */
- NULL, /* m_slots */
- NULL, /* m_traverse */
- NULL, /* m_clear */
- NULL /* m_free */
-};
-
-
-PyMODINIT_FUNC
-PyInit__xxsubinterpreters(void)
+static int
+module_exec(PyObject *mod)
{
- if (_init_globals() != 0) {
- return NULL;
- }
-
- /* Initialize types */
- if (PyType_Ready(&ChannelIDtype) != 0) {
- return NULL;
+ if (_globals_init() != 0) {
+ return -1;
}
- /* Create the module */
- PyObject *module = PyModule_Create(&interpretersmodule);
- if (module == NULL) {
- return NULL;
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ goto error;
}
/* Add exception types */
- PyObject *ns = PyModule_GetDict(module); // borrowed
- if (interp_exceptions_init(ns) != 0) {
- return NULL;
+ if (interp_exceptions_init(mod) != 0) {
+ goto error;
}
- if (channel_exceptions_init(ns) != 0) {
- return NULL;
+ if (channel_exceptions_init(mod) != 0) {
+ goto error;
}
/* Add other types */
- if (PyDict_SetItemString(ns, "ChannelID",
- Py_NewRef(&ChannelIDtype)) != 0) {
- return NULL;
- }
- if (PyDict_SetItemString(ns, "InterpreterID",
- Py_NewRef(&_PyInterpreterID_Type)) != 0) {
- return NULL;
+
+ // ChannelID
+ state->ChannelIDType = add_new_type(
+ mod, &ChannelIDType_spec, _channelid_shared);
+ if (state->ChannelIDType == NULL) {
+ goto error;
}
- if (_PyCrossInterpreterData_RegisterClass(&ChannelIDtype, _channelid_shared)) {
- return NULL;
+ // PyInterpreterID
+ if (PyModule_AddType(mod, &_PyInterpreterID_Type) < 0) {
+ goto error;
}
- return module;
+ return 0;
+
+error:
+ (void)_PyCrossInterpreterData_UnregisterClass(state->ChannelIDType);
+ _globals_fini();
+ return -1;
+}
+
+static struct PyModuleDef_Slot module_slots[] = {
+ {Py_mod_exec, module_exec},
+ {0, NULL},
+};
+
+static int
+module_traverse(PyObject *mod, visitproc visit, void *arg)
+{
+ module_state *state = get_module_state(mod);
+ assert(state != NULL);
+ traverse_module_state(state, visit, arg);
+ return 0;
+}
+
+static int
+module_clear(PyObject *mod)
+{
+ module_state *state = get_module_state(mod);
+ assert(state != NULL);
+ clear_module_state(state);
+ return 0;
+}
+
+static void
+module_free(void *mod)
+{
+ module_state *state = get_module_state(mod);
+ assert(state != NULL);
+ clear_module_state(state);
+ _globals_fini();
+}
+
+static struct PyModuleDef moduledef = {
+ .m_base = PyModuleDef_HEAD_INIT,
+ .m_name = MODULE_NAME,
+ .m_doc = module_doc,
+ .m_size = sizeof(module_state),
+ .m_methods = module_functions,
+ .m_slots = module_slots,
+ .m_traverse = module_traverse,
+ .m_clear = module_clear,
+ .m_free = (freefunc)module_free,
+};
+
+PyMODINIT_FUNC
+PyInit__xxsubinterpreters(void)
+{
+ return PyModuleDef_Init(&moduledef);
}
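
The conversion above moves the module to multi-phase init with per-module state, but the Python-level channel API it exposes is unchanged. A minimal usage sketch, assuming the module is built and importable as `_xxsubinterpreters` (as in CPython 3.12):

    import _xxsubinterpreters as interpreters

    # Channels are identified by integer IDs tracked in the module's globals.
    cid = interpreters.channel_create()
    interpreters.channel_send(cid, "hello")
    print(interpreters.channel_recv(cid))         # "hello"
    # With no default, an empty channel raises ChannelEmptyError;
    # passing a default returns it instead.
    print(interpreters.channel_recv(cid, None))
    interpreters.channel_destroy(cid)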
diff --git a/Modules/binascii.c b/Modules/binascii.c
index ffc2c59413613b..95ddb26988d6c9 100644
--- a/Modules/binascii.c
+++ b/Modules/binascii.c
@@ -303,14 +303,14 @@ binascii.b2a_uu
data: Py_buffer
/
*
- backtick: bool(accept={int}) = False
+ backtick: bool = False
Uuencode line of data.
[clinic start generated code]*/
static PyObject *
binascii_b2a_uu_impl(PyObject *module, Py_buffer *data, int backtick)
-/*[clinic end generated code: output=b1b99de62d9bbeb8 input=b26bc8d32b6ed2f6]*/
+/*[clinic end generated code: output=b1b99de62d9bbeb8 input=beb27822241095cd]*/
{
unsigned char *ascii_data;
const unsigned char *bin_data;
@@ -375,7 +375,7 @@ binascii.a2b_base64
data: ascii_buffer
/
*
- strict_mode: bool(accept={int}) = False
+ strict_mode: bool = False
Decode a line of base64 data.
@@ -386,7 +386,7 @@ Decode a line of base64 data.
static PyObject *
binascii_a2b_base64_impl(PyObject *module, Py_buffer *data, int strict_mode)
-/*[clinic end generated code: output=5409557788d4f975 input=3a30c4e3528317c6]*/
+/*[clinic end generated code: output=5409557788d4f975 input=c0c15fd0f8f9a62d]*/
{
assert(data->len >= 0);
@@ -521,14 +521,14 @@ binascii.b2a_base64
data: Py_buffer
/
*
- newline: bool(accept={int}) = True
+ newline: bool = True
Base64-code line of data.
[clinic start generated code]*/
static PyObject *
binascii_b2a_base64_impl(PyObject *module, Py_buffer *data, int newline)
-/*[clinic end generated code: output=4ad62c8e8485d3b3 input=6083dac5777fa45d]*/
+/*[clinic end generated code: output=4ad62c8e8485d3b3 input=0e20ff59c5f2e3e1]*/
{
unsigned char *ascii_data;
const unsigned char *bin_data;
@@ -952,14 +952,14 @@ binascii_unhexlify_impl(PyObject *module, Py_buffer *hexstr)
binascii.a2b_qp
data: ascii_buffer
- header: bool(accept={int}) = False
+ header: bool = False
Decode a string of qp-encoded data.
[clinic start generated code]*/
static PyObject *
binascii_a2b_qp_impl(PyObject *module, Py_buffer *data, int header)
-/*[clinic end generated code: output=e99f7846cfb9bc53 input=bf6766fea76cce8f]*/
+/*[clinic end generated code: output=e99f7846cfb9bc53 input=bdfb31598d4e47b9]*/
{
Py_ssize_t in, out;
char ch;
@@ -1048,9 +1048,9 @@ to_hex (unsigned char ch, unsigned char *s)
binascii.b2a_qp
data: Py_buffer
- quotetabs: bool(accept={int}) = False
- istext: bool(accept={int}) = True
- header: bool(accept={int}) = False
+ quotetabs: bool = False
+ istext: bool = True
+ header: bool = False
Encode a string using quoted-printable encoding.
@@ -1062,7 +1062,7 @@ are both encoded. When quotetabs is set, space and tabs are encoded.
static PyObject *
binascii_b2a_qp_impl(PyObject *module, Py_buffer *data, int quotetabs,
int istext, int header)
-/*[clinic end generated code: output=e9884472ebb1a94c input=21fb7eea4a184ba6]*/
+/*[clinic end generated code: output=e9884472ebb1a94c input=e9102879afb0defd]*/
{
Py_ssize_t in, out;
const unsigned char *databuf;
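
With the converter changed from `bool(accept={int})` to plain `bool`, these flags are converted via `PyObject_IsTrue()`, so any object with a truth value is accepted, not only ints. A small illustration:

    import binascii

    data = b"some data"
    print(binascii.b2a_uu(data, backtick=True))
    print(binascii.b2a_base64(data, newline=False))
    # Truthy/falsy objects of any type now work for these flags.
    print(binascii.b2a_qp(data, quotetabs=1, istext=[], header=None))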
diff --git a/Modules/cjkcodecs/clinic/multibytecodec.c.h b/Modules/cjkcodecs/clinic/multibytecodec.c.h
index b7e340e68796fa..1b41c231eac5d8 100644
--- a/Modules/cjkcodecs/clinic/multibytecodec.c.h
+++ b/Modules/cjkcodecs/clinic/multibytecodec.c.h
@@ -246,8 +246,8 @@ _multibytecodec_MultibyteIncrementalEncoder_encode(MultibyteIncrementalEncoderOb
if (!noptargs) {
goto skip_optional_pos;
}
- final = _PyLong_AsInt(args[1]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[1]);
+ if (final < 0) {
goto exit;
}
skip_optional_pos:
@@ -381,8 +381,8 @@ _multibytecodec_MultibyteIncrementalDecoder_decode(MultibyteIncrementalDecoderOb
if (!noptargs) {
goto skip_optional_pos;
}
- final = _PyLong_AsInt(args[1]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[1]);
+ if (final < 0) {
goto exit;
}
skip_optional_pos:
@@ -690,4 +690,4 @@ PyDoc_STRVAR(_multibytecodec___create_codec__doc__,
#define _MULTIBYTECODEC___CREATE_CODEC_METHODDEF \
{"__create_codec", (PyCFunction)_multibytecodec___create_codec, METH_O, _multibytecodec___create_codec__doc__},
-/*[clinic end generated code: output=b034ec7126c11bde input=a9049054013a1b77]*/
+/*[clinic end generated code: output=5f0e8dacddb0ac76 input=a9049054013a1b77]*/
diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c
index 1d77fd33ac3b87..8564494f6262fb 100644
--- a/Modules/cjkcodecs/multibytecodec.c
+++ b/Modules/cjkcodecs/multibytecodec.c
@@ -893,14 +893,14 @@ decoder_feed_buffer(MultibyteStatefulDecoderContext *ctx,
_multibytecodec.MultibyteIncrementalEncoder.encode
input: object
- final: bool(accept={int}) = False
+ final: bool = False
[clinic start generated code]*/
static PyObject *
_multibytecodec_MultibyteIncrementalEncoder_encode_impl(MultibyteIncrementalEncoderObject *self,
PyObject *input,
int final)
-/*[clinic end generated code: output=123361b6c505e2c1 input=093a1ddbb2fc6721]*/
+/*[clinic end generated code: output=123361b6c505e2c1 input=bd5f7d40d43e99b0]*/
{
return encoder_encode_stateful(STATEFUL_ECTX(self), input, final);
}
@@ -1114,14 +1114,14 @@ static PyType_Spec encoder_spec = {
_multibytecodec.MultibyteIncrementalDecoder.decode
input: Py_buffer
- final: bool(accept={int}) = False
+ final: bool = False
[clinic start generated code]*/
static PyObject *
_multibytecodec_MultibyteIncrementalDecoder_decode_impl(MultibyteIncrementalDecoderObject *self,
Py_buffer *input,
int final)
-/*[clinic end generated code: output=b9b9090e8a9ce2ba input=c9132b24d503eb1d]*/
+/*[clinic end generated code: output=b9b9090e8a9ce2ba input=8795fbb20860027a]*/
{
MultibyteDecodeBuffer buf;
char *data, *wdata = NULL;
diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h
index 11db478a8b4827..f2fbb352c2c69b 100644
--- a/Modules/clinic/_asynciomodule.c.h
+++ b/Modules/clinic/_asynciomodule.c.h
@@ -987,68 +987,6 @@ _asyncio_get_event_loop(PyObject *module, PyObject *Py_UNUSED(ignored))
return _asyncio_get_event_loop_impl(module);
}
-PyDoc_STRVAR(_asyncio__get_event_loop__doc__,
-"_get_event_loop($module, /, stacklevel=3)\n"
-"--\n"
-"\n");
-
-#define _ASYNCIO__GET_EVENT_LOOP_METHODDEF \
- {"_get_event_loop", _PyCFunction_CAST(_asyncio__get_event_loop), METH_FASTCALL|METH_KEYWORDS, _asyncio__get_event_loop__doc__},
-
-static PyObject *
-_asyncio__get_event_loop_impl(PyObject *module, int stacklevel);
-
-static PyObject *
-_asyncio__get_event_loop(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
-{
- PyObject *return_value = NULL;
- #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
-
- #define NUM_KEYWORDS 1
- static struct {
- PyGC_Head _this_is_not_used;
- PyObject_VAR_HEAD
- PyObject *ob_item[NUM_KEYWORDS];
- } _kwtuple = {
- .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
- .ob_item = { &_Py_ID(stacklevel), },
- };
- #undef NUM_KEYWORDS
- #define KWTUPLE (&_kwtuple.ob_base.ob_base)
-
- #else // !Py_BUILD_CORE
- # define KWTUPLE NULL
- #endif // !Py_BUILD_CORE
-
- static const char * const _keywords[] = {"stacklevel", NULL};
- static _PyArg_Parser _parser = {
- .keywords = _keywords,
- .fname = "_get_event_loop",
- .kwtuple = KWTUPLE,
- };
- #undef KWTUPLE
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- int stacklevel = 3;
-
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_pos;
- }
- stacklevel = _PyLong_AsInt(args[0]);
- if (stacklevel == -1 && PyErr_Occurred()) {
- goto exit;
- }
-skip_optional_pos:
- return_value = _asyncio__get_event_loop_impl(module, stacklevel);
-
-exit:
- return return_value;
-}
-
PyDoc_STRVAR(_asyncio_get_running_loop__doc__,
"get_running_loop($module, /)\n"
"--\n"
@@ -1304,4 +1242,4 @@ _asyncio__leave_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
exit:
return return_value;
}
-/*[clinic end generated code: output=550bc6603df89ed9 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=83580c190031241c input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_codecsmodule.c.h b/Modules/clinic/_codecsmodule.c.h
index 25db060cd900fa..f11bcc8815b920 100644
--- a/Modules/clinic/_codecsmodule.c.h
+++ b/Modules/clinic/_codecsmodule.c.h
@@ -450,8 +450,8 @@ _codecs_utf_7_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -520,8 +520,8 @@ _codecs_utf_8_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -590,8 +590,8 @@ _codecs_utf_16_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -660,8 +660,8 @@ _codecs_utf_16_le_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -730,8 +730,8 @@ _codecs_utf_16_be_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -809,8 +809,8 @@ _codecs_utf_16_ex_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar
if (nargs < 4) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[3]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[3]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -879,8 +879,8 @@ _codecs_utf_32_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -949,8 +949,8 @@ _codecs_utf_32_le_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -1019,8 +1019,8 @@ _codecs_utf_32_be_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -1098,8 +1098,8 @@ _codecs_utf_32_ex_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar
if (nargs < 4) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[3]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[3]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -1178,8 +1178,8 @@ _codecs_unicode_escape_decode(PyObject *module, PyObject *const *args, Py_ssize_
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -1258,8 +1258,8 @@ _codecs_raw_unicode_escape_decode(PyObject *module, PyObject *const *args, Py_ss
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -1521,8 +1521,8 @@ _codecs_mbcs_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -1595,8 +1595,8 @@ _codecs_oem_decode(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 3) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[2]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[2]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -1674,8 +1674,8 @@ _codecs_code_page_decode(PyObject *module, PyObject *const *args, Py_ssize_t nar
if (nargs < 4) {
goto skip_optional;
}
- final = _PyLong_AsInt(args[3]);
- if (final == -1 && PyErr_Occurred()) {
+ final = PyObject_IsTrue(args[3]);
+ if (final < 0) {
goto exit;
}
skip_optional:
@@ -2869,4 +2869,4 @@ _codecs_lookup_error(PyObject *module, PyObject *arg)
#ifndef _CODECS_CODE_PAGE_ENCODE_METHODDEF
#define _CODECS_CODE_PAGE_ENCODE_METHODDEF
#endif /* !defined(_CODECS_CODE_PAGE_ENCODE_METHODDEF) */
-/*[clinic end generated code: output=e885abad241bc54d input=a9049054013a1b77]*/
+/*[clinic end generated code: output=603da07cf8dfeb4b input=a9049054013a1b77]*/
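
The same `_PyLong_AsInt()` to `PyObject_IsTrue()` substitution recurs throughout these generated files, so the *final* argument of the stateful decoders behaves as an ordinary boolean. A brief sketch using the standard `codecs` wrappers around `_codecs`:

    import codecs

    # Truncated UTF-8: with final=False the decoder keeps the byte pending,
    # with final=True it raises UnicodeDecodeError.
    print(codecs.utf_8_decode(b"\xc3", "strict", False))   # ('', 0)
    try:
        codecs.utf_8_decode(b"\xc3", "strict", True)
    except UnicodeDecodeError as exc:
        print("final=True ->", exc.reason)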
diff --git a/Modules/clinic/_cursesmodule.c.h b/Modules/clinic/_cursesmodule.c.h
index 67fadace863970..9d99d41af5d2d9 100644
--- a/Modules/clinic/_cursesmodule.c.h
+++ b/Modules/clinic/_cursesmodule.c.h
@@ -1748,7 +1748,7 @@ _curses_window_touchline(PyCursesWindowObject *self, PyObject *args)
}
break;
case 3:
- if (!PyArg_ParseTuple(args, "iii:touchline", &start, &count, &changed)) {
+ if (!PyArg_ParseTuple(args, "iip:touchline", &start, &count, &changed)) {
goto exit;
}
group_right_1 = 1;
@@ -1941,8 +1941,8 @@ _curses_cbreak(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 1) {
goto skip_optional;
}
- flag = _PyLong_AsInt(args[0]);
- if (flag == -1 && PyErr_Occurred()) {
+ flag = PyObject_IsTrue(args[0]);
+ if (flag < 0) {
goto exit;
}
skip_optional:
@@ -2177,8 +2177,8 @@ _curses_echo(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 1) {
goto skip_optional;
}
- flag = _PyLong_AsInt(args[0]);
- if (flag == -1 && PyErr_Occurred()) {
+ flag = PyObject_IsTrue(args[0]);
+ if (flag < 0) {
goto exit;
}
skip_optional:
@@ -2900,8 +2900,8 @@ _curses_intrflush(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
int flag;
- flag = _PyLong_AsInt(arg);
- if (flag == -1 && PyErr_Occurred()) {
+ flag = PyObject_IsTrue(arg);
+ if (flag < 0) {
goto exit;
}
return_value = _curses_intrflush_impl(module, flag);
@@ -3064,8 +3064,8 @@ _curses_meta(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
int yes;
- yes = _PyLong_AsInt(arg);
- if (yes == -1 && PyErr_Occurred()) {
+ yes = PyObject_IsTrue(arg);
+ if (yes < 0) {
goto exit;
}
return_value = _curses_meta_impl(module, yes);
@@ -3308,8 +3308,8 @@ _curses_nl(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 1) {
goto skip_optional;
}
- flag = _PyLong_AsInt(args[0]);
- if (flag == -1 && PyErr_Occurred()) {
+ flag = PyObject_IsTrue(args[0]);
+ if (flag < 0) {
goto exit;
}
skip_optional:
@@ -3540,8 +3540,8 @@ _curses_qiflush(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 1) {
goto skip_optional;
}
- flag = _PyLong_AsInt(args[0]);
- if (flag == -1 && PyErr_Occurred()) {
+ flag = PyObject_IsTrue(args[0]);
+ if (flag < 0) {
goto exit;
}
skip_optional:
@@ -3603,8 +3603,8 @@ _curses_raw(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 1) {
goto skip_optional;
}
- flag = _PyLong_AsInt(args[0]);
- if (flag == -1 && PyErr_Occurred()) {
+ flag = PyObject_IsTrue(args[0]);
+ if (flag < 0) {
goto exit;
}
skip_optional:
@@ -4164,8 +4164,8 @@ _curses_use_env(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
int flag;
- flag = _PyLong_AsInt(arg);
- if (flag == -1 && PyErr_Occurred()) {
+ flag = PyObject_IsTrue(arg);
+ if (flag < 0) {
goto exit;
}
return_value = _curses_use_env_impl(module, flag);
@@ -4313,4 +4313,4 @@ _curses_has_extended_color_support(PyObject *module, PyObject *Py_UNUSED(ignored
#ifndef _CURSES_USE_DEFAULT_COLORS_METHODDEF
#define _CURSES_USE_DEFAULT_COLORS_METHODDEF
#endif /* !defined(_CURSES_USE_DEFAULT_COLORS_METHODDEF) */
-/*[clinic end generated code: output=b2e71e2012f16197 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=27a2364193b503c1 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_ssl.c.h b/Modules/clinic/_ssl.c.h
index 622e321fa1d8b3..2d7c98c4f014a3 100644
--- a/Modules/clinic/_ssl.c.h
+++ b/Modules/clinic/_ssl.c.h
@@ -757,8 +757,8 @@ _ssl__SSLContext__wrap_socket(PySSLContext *self, PyObject *const *args, Py_ssiz
goto exit;
}
sock = args[0];
- server_side = _PyLong_AsInt(args[1]);
- if (server_side == -1 && PyErr_Occurred()) {
+ server_side = PyObject_IsTrue(args[1]);
+ if (server_side < 0) {
goto exit;
}
if (!noptargs) {
@@ -855,8 +855,8 @@ _ssl__SSLContext__wrap_bio(PySSLContext *self, PyObject *const *args, Py_ssize_t
goto exit;
}
outgoing = (PySSLMemoryBIO *)args[1];
- server_side = _PyLong_AsInt(args[2]);
- if (server_side == -1 && PyErr_Occurred()) {
+ server_side = PyObject_IsTrue(args[2]);
+ if (server_side < 0) {
goto exit;
}
if (!noptargs) {
@@ -1543,4 +1543,4 @@ _ssl_enum_crls(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
#ifndef _SSL_ENUM_CRLS_METHODDEF
#define _SSL_ENUM_CRLS_METHODDEF
#endif /* !defined(_SSL_ENUM_CRLS_METHODDEF) */
-/*[clinic end generated code: output=9f477b0c709acb28 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=a3d97a19163bb044 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_tkinter.c.h b/Modules/clinic/_tkinter.c.h
index a251202f9bba2c..96c6ee26f426c3 100644
--- a/Modules/clinic/_tkinter.c.h
+++ b/Modules/clinic/_tkinter.c.h
@@ -747,29 +747,29 @@ _tkinter_create(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (nargs < 4) {
goto skip_optional;
}
- interactive = _PyLong_AsInt(args[3]);
- if (interactive == -1 && PyErr_Occurred()) {
+ interactive = PyObject_IsTrue(args[3]);
+ if (interactive < 0) {
goto exit;
}
if (nargs < 5) {
goto skip_optional;
}
- wantobjects = _PyLong_AsInt(args[4]);
- if (wantobjects == -1 && PyErr_Occurred()) {
+ wantobjects = PyObject_IsTrue(args[4]);
+ if (wantobjects < 0) {
goto exit;
}
if (nargs < 6) {
goto skip_optional;
}
- wantTk = _PyLong_AsInt(args[5]);
- if (wantTk == -1 && PyErr_Occurred()) {
+ wantTk = PyObject_IsTrue(args[5]);
+ if (wantTk < 0) {
goto exit;
}
if (nargs < 7) {
goto skip_optional;
}
- sync = _PyLong_AsInt(args[6]);
- if (sync == -1 && PyErr_Occurred()) {
+ sync = PyObject_IsTrue(args[6]);
+ if (sync < 0) {
goto exit;
}
if (nargs < 8) {
@@ -865,4 +865,4 @@ _tkinter_getbusywaitinterval(PyObject *module, PyObject *Py_UNUSED(ignored))
#ifndef _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF
#define _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF
#endif /* !defined(_TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF) */
-/*[clinic end generated code: output=d022835d05fc8608 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=2a4e3bf8448604b5 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h
index 13bf8b482cd69e..891b3f851d1243 100644
--- a/Modules/clinic/_winapi.c.h
+++ b/Modules/clinic/_winapi.c.h
@@ -133,7 +133,7 @@ _winapi_ConnectNamedPipe(PyObject *module, PyObject *const *args, Py_ssize_t nar
static const char * const _keywords[] = {"handle", "overlapped", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
- .format = "" F_HANDLE "|i:ConnectNamedPipe",
+ .format = "" F_HANDLE "|p:ConnectNamedPipe",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
@@ -972,7 +972,7 @@ _winapi_ReadFile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb
static const char * const _keywords[] = {"handle", "size", "overlapped", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
- .format = "" F_HANDLE "k|i:ReadFile",
+ .format = "" F_HANDLE "k|p:ReadFile",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
@@ -1220,7 +1220,7 @@ _winapi_WriteFile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO
static const char * const _keywords[] = {"handle", "buffer", "overlapped", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
- .format = "" F_HANDLE "O|i:WriteFile",
+ .format = "" F_HANDLE "O|p:WriteFile",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
@@ -1371,4 +1371,4 @@ _winapi__mimetypes_read_windows_registry(PyObject *module, PyObject *const *args
exit:
return return_value;
}
-/*[clinic end generated code: output=23ea9e176d86e026 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=edb1a9d1bbfd6394 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/binascii.c.h b/Modules/clinic/binascii.c.h
index 23ebdff2108258..63566dfb10e74f 100644
--- a/Modules/clinic/binascii.c.h
+++ b/Modules/clinic/binascii.c.h
@@ -99,8 +99,8 @@ binascii_b2a_uu(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj
if (!noptargs) {
goto skip_optional_kwonly;
}
- backtick = _PyLong_AsInt(args[1]);
- if (backtick == -1 && PyErr_Occurred()) {
+ backtick = PyObject_IsTrue(args[1]);
+ if (backtick < 0) {
goto exit;
}
skip_optional_kwonly:
@@ -175,8 +175,8 @@ binascii_a2b_base64(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P
if (!noptargs) {
goto skip_optional_kwonly;
}
- strict_mode = _PyLong_AsInt(args[1]);
- if (strict_mode == -1 && PyErr_Occurred()) {
+ strict_mode = PyObject_IsTrue(args[1]);
+ if (strict_mode < 0) {
goto exit;
}
skip_optional_kwonly:
@@ -250,8 +250,8 @@ binascii_b2a_base64(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P
if (!noptargs) {
goto skip_optional_kwonly;
}
- newline = _PyLong_AsInt(args[1]);
- if (newline == -1 && PyErr_Occurred()) {
+ newline = PyObject_IsTrue(args[1]);
+ if (newline < 0) {
goto exit;
}
skip_optional_kwonly:
@@ -680,8 +680,8 @@ binascii_a2b_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj
if (!noptargs) {
goto skip_optional_pos;
}
- header = _PyLong_AsInt(args[1]);
- if (header == -1 && PyErr_Occurred()) {
+ header = PyObject_IsTrue(args[1]);
+ if (header < 0) {
goto exit;
}
skip_optional_pos:
@@ -763,8 +763,8 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj
goto skip_optional_pos;
}
if (args[1]) {
- quotetabs = _PyLong_AsInt(args[1]);
- if (quotetabs == -1 && PyErr_Occurred()) {
+ quotetabs = PyObject_IsTrue(args[1]);
+ if (quotetabs < 0) {
goto exit;
}
if (!--noptargs) {
@@ -772,16 +772,16 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj
}
}
if (args[2]) {
- istext = _PyLong_AsInt(args[2]);
- if (istext == -1 && PyErr_Occurred()) {
+ istext = PyObject_IsTrue(args[2]);
+ if (istext < 0) {
goto exit;
}
if (!--noptargs) {
goto skip_optional_pos;
}
}
- header = _PyLong_AsInt(args[3]);
- if (header == -1 && PyErr_Occurred()) {
+ header = PyObject_IsTrue(args[3]);
+ if (header < 0) {
goto exit;
}
skip_optional_pos:
@@ -795,4 +795,4 @@ binascii_b2a_qp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj
return return_value;
}
-/*[clinic end generated code: output=a266ba13c374aefa input=a9049054013a1b77]*/
+/*[clinic end generated code: output=ab156917c9db79d2 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/itertoolsmodule.c.h b/Modules/clinic/itertoolsmodule.c.h
index 17f9ebb249390f..287de524e91307 100644
--- a/Modules/clinic/itertoolsmodule.c.h
+++ b/Modules/clinic/itertoolsmodule.c.h
@@ -12,19 +12,19 @@ PyDoc_STRVAR(batched_new__doc__,
"batched(iterable, n)\n"
"--\n"
"\n"
-"Batch data into lists of length n. The last batch may be shorter than n.\n"
+"Batch data into tuples of length n. The last batch may be shorter than n.\n"
"\n"
-"Loops over the input iterable and accumulates data into lists\n"
+"Loops over the input iterable and accumulates data into tuples\n"
"up to size n. The input is consumed lazily, just enough to\n"
-"fill a list. The result is yielded as soon as a batch is full\n"
+"fill a batch. The result is yielded as soon as a batch is full\n"
"or when the input iterable is exhausted.\n"
"\n"
" >>> for batch in batched(\'ABCDEFG\', 3):\n"
" ... print(batch)\n"
" ...\n"
-" [\'A\', \'B\', \'C\']\n"
-" [\'D\', \'E\', \'F\']\n"
-" [\'G\']");
+" (\'A\', \'B\', \'C\')\n"
+" (\'D\', \'E\', \'F\')\n"
+" (\'G\',)");
static PyObject *
batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n);
@@ -913,4 +913,4 @@ itertools_count(PyTypeObject *type, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=efea8cd1e647bd17 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=0229ebd72962f130 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h
index f9f6ca372ec6c7..86251008b1bdae 100644
--- a/Modules/clinic/posixmodule.c.h
+++ b/Modules/clinic/posixmodule.c.h
@@ -3110,8 +3110,8 @@ os_posix_spawn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
}
}
if (args[5]) {
- resetids = _PyLong_AsInt(args[5]);
- if (resetids == -1 && PyErr_Occurred()) {
+ resetids = PyObject_IsTrue(args[5]);
+ if (resetids < 0) {
goto exit;
}
if (!--noptargs) {
@@ -3119,8 +3119,8 @@ os_posix_spawn(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
}
}
if (args[6]) {
- setsid = _PyLong_AsInt(args[6]);
- if (setsid == -1 && PyErr_Occurred()) {
+ setsid = PyObject_IsTrue(args[6]);
+ if (setsid < 0) {
goto exit;
}
if (!--noptargs) {
@@ -3260,8 +3260,8 @@ os_posix_spawnp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj
}
}
if (args[5]) {
- resetids = _PyLong_AsInt(args[5]);
- if (resetids == -1 && PyErr_Occurred()) {
+ resetids = PyObject_IsTrue(args[5]);
+ if (resetids < 0) {
goto exit;
}
if (!--noptargs) {
@@ -3269,8 +3269,8 @@ os_posix_spawnp(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj
}
}
if (args[6]) {
- setsid = _PyLong_AsInt(args[6]);
- if (setsid == -1 && PyErr_Occurred()) {
+ setsid = PyObject_IsTrue(args[6]);
+ if (setsid < 0) {
goto exit;
}
if (!--noptargs) {
@@ -10225,8 +10225,8 @@ os_set_blocking(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (fd == -1 && PyErr_Occurred()) {
goto exit;
}
- blocking = _PyLong_AsInt(args[1]);
- if (blocking == -1 && PyErr_Occurred()) {
+ blocking = PyObject_IsTrue(args[1]);
+ if (blocking < 0) {
goto exit;
}
return_value = os_set_blocking_impl(module, fd, blocking);
@@ -11549,4 +11549,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na
#ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF
#define OS_WAITSTATUS_TO_EXITCODE_METHODDEF
#endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */
-/*[clinic end generated code: output=4192d8e09e216300 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=04fd23c89ab41f75 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/pyexpat.c.h b/Modules/clinic/pyexpat.c.h
index 0454fbc9994504..34937c5d594f5c 100644
--- a/Modules/clinic/pyexpat.c.h
+++ b/Modules/clinic/pyexpat.c.h
@@ -52,8 +52,8 @@ pyexpat_xmlparser_Parse(xmlparseobject *self, PyTypeObject *cls, PyObject *const
if (nargs < 2) {
goto skip_optional_posonly;
}
- isfinal = _PyLong_AsInt(args[1]);
- if (isfinal == -1 && PyErr_Occurred()) {
+ isfinal = PyObject_IsTrue(args[1]);
+ if (isfinal < 0) {
goto exit;
}
skip_optional_posonly:
@@ -498,4 +498,4 @@ pyexpat_ErrorString(PyObject *module, PyObject *arg)
#ifndef PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF
#define PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF
#endif /* !defined(PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF) */
-/*[clinic end generated code: output=de5f664ef05ef34a input=a9049054013a1b77]*/
+/*[clinic end generated code: output=63efc62e24a7b5a7 input=a9049054013a1b77]*/
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
index fa4c2d0cccd1b6..5309a3728c5e07 100644
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -18,12 +18,6 @@
# include
#endif
-/* Using an alternative stack requires sigaltstack()
- and sigaction() SA_ONSTACK */
-#if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION)
-# define FAULTHANDLER_USE_ALT_STACK
-#endif
-
#if defined(FAULTHANDLER_USE_ALT_STACK) && defined(HAVE_LINUX_AUXVEC_H) && defined(HAVE_SYS_AUXV_H)
# include <linux/auxvec.h>  // AT_MINSIGSTKSZ
# include <sys/auxv.h>      // getauxval()
@@ -32,13 +26,6 @@
/* Allocate at maximum 100 MiB of the stack to raise the stack overflow */
#define STACK_OVERFLOW_MAX_SIZE (100 * 1024 * 1024)
-#ifndef MS_WINDOWS
- /* register() is useless on Windows, because only SIGSEGV, SIGABRT and
- SIGILL can be handled by the process, and these signals can only be used
- with enable(), not using register() */
-# define FAULTHANDLER_USER
-#endif
-
#define PUTS(fd, str) _Py_write_noraise(fd, str, strlen(str))
@@ -58,12 +45,6 @@
#endif
-#ifdef HAVE_SIGACTION
-typedef struct sigaction _Py_sighandler_t;
-#else
-typedef PyOS_sighandler_t _Py_sighandler_t;
-#endif
-
typedef struct {
int signum;
int enabled;
@@ -72,47 +53,12 @@ typedef struct {
int all_threads;
} fault_handler_t;
-static struct {
- int enabled;
- PyObject *file;
- int fd;
- int all_threads;
- PyInterpreterState *interp;
-#ifdef MS_WINDOWS
- void *exc_handler;
-#endif
-} fatal_error = {0, NULL, -1, 0};
-
-static struct {
- PyObject *file;
- int fd;
- PY_TIMEOUT_T timeout_us; /* timeout in microseconds */
- int repeat;
- PyInterpreterState *interp;
- int exit;
- char *header;
- size_t header_len;
- /* The main thread always holds this lock. It is only released when
- faulthandler_thread() is interrupted before this thread exits, or at
- Python exit. */
- PyThread_type_lock cancel_event;
- /* released by child thread when joined */
- PyThread_type_lock running;
-} thread;
+#define fatal_error _PyRuntime.faulthandler.fatal_error
+#define thread _PyRuntime.faulthandler.thread
#ifdef FAULTHANDLER_USER
-typedef struct {
- int enabled;
- PyObject *file;
- int fd;
- int all_threads;
- int chain;
- _Py_sighandler_t previous;
- PyInterpreterState *interp;
-} user_signal_t;
-
-static user_signal_t *user_signals;
-
+#define user_signals _PyRuntime.faulthandler.user_signals
+typedef struct faulthandler_user_signal user_signal_t;
static void faulthandler_user(int signum);
#endif /* FAULTHANDLER_USER */
@@ -134,8 +80,8 @@ static const size_t faulthandler_nsignals = \
Py_ARRAY_LENGTH(faulthandler_handlers);
#ifdef FAULTHANDLER_USE_ALT_STACK
-static stack_t stack;
-static stack_t old_stack;
+# define stack _PyRuntime.faulthandler.stack
+# define old_stack _PyRuntime.faulthandler.old_stack
#endif
@@ -270,7 +216,7 @@ faulthandler_dump_traceback_py(PyObject *self,
int fd;
if (!PyArg_ParseTupleAndKeywords(args, kwargs,
- "|Oi:dump_traceback", kwlist,
+ "|Op:dump_traceback", kwlist,
&file, &all_threads))
return NULL;
@@ -546,7 +492,7 @@ faulthandler_py_enable(PyObject *self, PyObject *args, PyObject *kwargs)
PyThreadState *tstate;
if (!PyArg_ParseTupleAndKeywords(args, kwargs,
- "|Oi:enable", kwlist, &file, &all_threads))
+ "|Op:enable", kwlist, &file, &all_threads))
return NULL;
fd = faulthandler_get_fileno(&file);
@@ -916,7 +862,7 @@ faulthandler_register_py(PyObject *self,
int err;
if (!PyArg_ParseTupleAndKeywords(args, kwargs,
- "i|Oii:register", kwlist,
+ "i|Opp:register", kwlist,
&signum, &file, &all_threads, &chain))
return NULL;
@@ -1094,7 +1040,7 @@ faulthandler_fatal_error_thread(void *plock)
static PyObject *
faulthandler_fatal_error_c_thread(PyObject *self, PyObject *args)
{
- long thread;
+ long tid;
PyThread_type_lock lock;
faulthandler_suppress_crash_report();
@@ -1105,8 +1051,8 @@ faulthandler_fatal_error_c_thread(PyObject *self, PyObject *args)
PyThread_acquire_lock(lock, WAIT_LOCK);
- thread = PyThread_start_new_thread(faulthandler_fatal_error_thread, lock);
- if (thread == -1) {
+ tid = PyThread_start_new_thread(faulthandler_fatal_error_thread, lock);
+ if (tid == -1) {
PyThread_free_lock(lock);
PyErr_SetString(PyExc_RuntimeError, "unable to start the thread");
return NULL;
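
The faulthandler globals now live in `_PyRuntime`, and the parsing formats switch from `i` to `p`, so the flag arguments are genuine booleans. Typical use from Python is unchanged; a small sketch (`register()` is only available on non-Windows platforms):

    import faulthandler
    import signal
    import sys

    faulthandler.enable(file=sys.stderr, all_threads=True)
    faulthandler.dump_traceback(all_threads=False)
    if hasattr(faulthandler, "register"):        # not available on Windows
        faulthandler.register(signal.SIGUSR1, all_threads=True, chain=False)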
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index e8b9bc76eec935..c1f1e7320db719 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -56,11 +56,13 @@ static PyTypeObject pairwise_type;
/* batched object ************************************************************/
/* Note: The built-in zip() function includes a "strict" argument
- that is needed because that function can silently truncate data
- and there is no easy way for a user to detect that condition.
- The same reasoning does not apply to batched() which never drops
- data. Instead, it produces a shorter list which can be handled
- as the user sees fit.
+ that was needed because that function would silently truncate data,
+ and there was no easy way for a user to detect the data loss.
+ The same reasoning does not apply to batched() which never drops data.
+ Instead, batched() produces a shorter tuple which can be handled
+ as the user sees fit. If requested, it would be reasonable to add
+ "fillvalue" support which had demonstrated value in zip_longest().
+ For now, the API is kept simple and clean.
*/
typedef struct {
@@ -74,25 +76,25 @@ typedef struct {
itertools.batched.__new__ as batched_new
iterable: object
n: Py_ssize_t
-Batch data into lists of length n. The last batch may be shorter than n.
+Batch data into tuples of length n. The last batch may be shorter than n.
-Loops over the input iterable and accumulates data into lists
+Loops over the input iterable and accumulates data into tuples
up to size n. The input is consumed lazily, just enough to
-fill a list. The result is yielded as soon as a batch is full
+fill a batch. The result is yielded as soon as a batch is full
or when the input iterable is exhausted.
>>> for batch in batched('ABCDEFG', 3):
... print(batch)
...
- ['A', 'B', 'C']
- ['D', 'E', 'F']
- ['G']
+ ('A', 'B', 'C')
+ ('D', 'E', 'F')
+ ('G',)
[clinic start generated code]*/
static PyObject *
batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n)
-/*[clinic end generated code: output=7ebc954d655371b6 input=f28fd12cb52365f0]*/
+/*[clinic end generated code: output=7ebc954d655371b6 input=ffd70726927c5129]*/
{
PyObject *it;
batchedobject *bo;
@@ -150,12 +152,12 @@ batched_next(batchedobject *bo)
if (it == NULL) {
return NULL;
}
- result = PyList_New(n);
+ result = PyTuple_New(n);
if (result == NULL) {
return NULL;
}
iternextfunc iternext = *Py_TYPE(it)->tp_iternext;
- PyObject **items = _PyList_ITEMS(result);
+ PyObject **items = _PyTuple_ITEMS(result);
for (i=0 ; i < n ; i++) {
item = iternext(it);
if (item == NULL) {
@@ -180,8 +182,7 @@ batched_next(batchedobject *bo)
Py_DECREF(result);
return NULL;
}
- /* Elements in result[i:] are still NULL */
- Py_SET_SIZE(result, i);
+ _PyTuple_Resize(&result, i);
return result;
}
@@ -1368,6 +1369,7 @@ cycle_setstate(cycleobject *lz, PyObject *state)
PyErr_SetString(PyExc_TypeError, "state is not a tuple");
return NULL;
}
+ // The second item can be 1/0 in old pickles and True/False in new pickles
if (!PyArg_ParseTuple(state, "O!i", &PyList_Type, &saved, &firstpass)) {
return NULL;
}
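
As the updated docstring says, `batched()` now yields tuples rather than lists, with the final batch simply shorter when the input runs out:

    from itertools import batched

    for batch in batched("ABCDEFG", 3):
        print(batch)
    # ('A', 'B', 'C')
    # ('D', 'E', 'F')
    # ('G',)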
diff --git a/Modules/ossaudiodev.c b/Modules/ossaudiodev.c
index 79f4ebad836c8b..2319eac449eb4f 100644
--- a/Modules/ossaudiodev.c
+++ b/Modules/ossaudiodev.c
@@ -569,7 +569,7 @@ oss_setparameters(oss_audio_t *self, PyObject *args)
if (!_is_fd_valid(self->fd))
return NULL;
- if (!PyArg_ParseTuple(args, "iii|i:setparameters",
+ if (!PyArg_ParseTuple(args, "iii|p:setparameters",
&wanted_fmt, &wanted_channels, &wanted_rate,
&strict))
return NULL;
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index 95ecf1c7c4b28c..4817973262f484 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -495,9 +495,11 @@ extern char *ctermid_r(char *);
#ifdef MS_WINDOWS
# define INITFUNC PyInit_nt
# define MODNAME "nt"
+# define MODNAME_OBJ &_Py_ID(nt)
#else
# define INITFUNC PyInit_posix
# define MODNAME "posix"
+# define MODNAME_OBJ &_Py_ID(posix)
#endif
#if defined(__sun)
@@ -974,6 +976,7 @@ typedef struct {
#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM)
PyObject *SchedParamType;
#endif
+ newfunc statresult_new_orig;
PyObject *StatResultType;
PyObject *StatVFSResultType;
PyObject *TerminalSizeType;
@@ -2225,7 +2228,6 @@ static PyStructSequence_Desc waitid_result_desc = {
5
};
#endif
-static newfunc structseq_new;
static PyObject *
statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
@@ -2233,6 +2235,19 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
PyStructSequence *result;
int i;
+ // ht_module doesn't get set in PyStructSequence_NewType(),
+ // so we can't use PyType_GetModule().
+ PyObject *mod = PyImport_GetModule(MODNAME_OBJ);
+ if (mod == NULL) {
+ return NULL;
+ }
+ _posixstate *state = get_posix_state(mod);
+ Py_DECREF(mod);
+ if (state == NULL) {
+ return NULL;
+ }
+#define structseq_new state->statresult_new_orig
+
result = (PyStructSequence*)structseq_new(type, args, kwds);
if (!result)
return NULL;
@@ -6324,9 +6339,9 @@ os.posix_spawn
A sequence of file action tuples.
setpgroup: object = NULL
The pgroup to use with the POSIX_SPAWN_SETPGROUP flag.
- resetids: bool(accept={int}) = False
+ resetids: bool = False
If the value is `true` the POSIX_SPAWN_RESETIDS will be activated.
- setsid: bool(accept={int}) = False
+ setsid: bool = False
If the value is `true` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated.
setsigmask: object(c_default='NULL') = ()
The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag.
@@ -6344,7 +6359,7 @@ os_posix_spawn_impl(PyObject *module, path_t *path, PyObject *argv,
PyObject *setpgroup, int resetids, int setsid,
PyObject *setsigmask, PyObject *setsigdef,
PyObject *scheduler)
-/*[clinic end generated code: output=14a1098c566bc675 input=8c6305619a00ad04]*/
+/*[clinic end generated code: output=14a1098c566bc675 input=808aed1090d84e33]*/
{
return py_posix_spawn(0, module, path, argv, env, file_actions,
setpgroup, resetids, setsid, setsigmask, setsigdef,
@@ -6370,9 +6385,9 @@ os.posix_spawnp
A sequence of file action tuples.
setpgroup: object = NULL
The pgroup to use with the POSIX_SPAWN_SETPGROUP flag.
- resetids: bool(accept={int}) = False
+ resetids: bool = False
If the value is `True` the POSIX_SPAWN_RESETIDS will be activated.
- setsid: bool(accept={int}) = False
+ setsid: bool = False
If the value is `True` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated.
setsigmask: object(c_default='NULL') = ()
The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag.
@@ -6390,7 +6405,7 @@ os_posix_spawnp_impl(PyObject *module, path_t *path, PyObject *argv,
PyObject *setpgroup, int resetids, int setsid,
PyObject *setsigmask, PyObject *setsigdef,
PyObject *scheduler)
-/*[clinic end generated code: output=7b9aaefe3031238d input=c1911043a22028da]*/
+/*[clinic end generated code: output=7b9aaefe3031238d input=9e89e616116752a1]*/
{
return py_posix_spawn(1, module, path, argv, env, file_actions,
setpgroup, resetids, setsid, setsigmask, setsigdef,
@@ -9051,11 +9066,6 @@ build_times_result(PyObject *module, double user, double system,
}
-#ifndef MS_WINDOWS
-#define NEED_TICKS_PER_SECOND
-static long ticks_per_second = -1;
-#endif /* MS_WINDOWS */
-
/*[clinic input]
os.times
@@ -9091,20 +9101,22 @@ os_times_impl(PyObject *module)
}
#else /* MS_WINDOWS */
{
-
-
struct tms t;
clock_t c;
errno = 0;
c = times(&t);
- if (c == (clock_t) -1)
+ if (c == (clock_t) -1) {
return posix_error();
+ }
+ assert(_PyRuntime.time.ticks_per_second_initialized);
+#define ticks_per_second _PyRuntime.time.ticks_per_second
return build_times_result(module,
(double)t.tms_utime / ticks_per_second,
(double)t.tms_stime / ticks_per_second,
(double)t.tms_cutime / ticks_per_second,
(double)t.tms_cstime / ticks_per_second,
(double)c / ticks_per_second);
+#undef ticks_per_second
}
#endif /* MS_WINDOWS */
#endif /* HAVE_TIMES */
@@ -13528,7 +13540,7 @@ os_get_blocking_impl(PyObject *module, int fd)
/*[clinic input]
os.set_blocking
fd: int
- blocking: bool(accept={int})
+ blocking: bool
/
Set the blocking mode of the specified file descriptor.
@@ -13539,7 +13551,7 @@ clear the O_NONBLOCK flag otherwise.
static PyObject *
os_set_blocking_impl(PyObject *module, int fd, int blocking)
-/*[clinic end generated code: output=384eb43aa0762a9d input=bf5c8efdc5860ff3]*/
+/*[clinic end generated code: output=384eb43aa0762a9d input=7e9dfc9b14804dd4]*/
{
int result;
@@ -15912,7 +15924,7 @@ posixmodule_exec(PyObject *m)
}
PyModule_AddObject(m, "stat_result", Py_NewRef(StatResultType));
state->StatResultType = StatResultType;
- structseq_new = ((PyTypeObject *)StatResultType)->tp_new;
+ state->statresult_new_orig = ((PyTypeObject *)StatResultType)->tp_new;
((PyTypeObject *)StatResultType)->tp_new = statresult_new;
statvfs_result_desc.name = "os.statvfs_result"; /* see issue #19209 */
@@ -15922,15 +15934,6 @@ posixmodule_exec(PyObject *m)
}
PyModule_AddObject(m, "statvfs_result", Py_NewRef(StatVFSResultType));
state->StatVFSResultType = StatVFSResultType;
-#ifdef NEED_TICKS_PER_SECOND
-# if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK)
- ticks_per_second = sysconf(_SC_CLK_TCK);
-# elif defined(HZ)
- ticks_per_second = HZ;
-# else
- ticks_per_second = 60; /* magic fallback value; may be bogus */
-# endif
-#endif
#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM)
sched_param_desc.name = MODNAME ".sched_param";
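
`ticks_per_second` is now initialized once in `_PyRuntime` (see the `timemodule.c` changes below) and consumed here when scaling the raw `times()` fields. The Python-level result is unchanged:

    import os

    t = os.times()   # os.times_result(user=..., system=..., children_user=..., ...)
    print(t.user, t.system, t.elapsed)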
diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c
index 0e0a9cf7cc2c5c..2440798bff7e66 100644
--- a/Modules/pyexpat.c
+++ b/Modules/pyexpat.c
@@ -710,7 +710,7 @@ pyexpat.xmlparser.Parse
cls: defining_class
data: object
- isfinal: bool(accept={int}) = False
+ isfinal: bool = False
/
Parse XML data.
@@ -721,7 +721,7 @@ Parse XML data.
static PyObject *
pyexpat_xmlparser_Parse_impl(xmlparseobject *self, PyTypeObject *cls,
PyObject *data, int isfinal)
-/*[clinic end generated code: output=8faffe07fe1f862a input=fc97f833558ca715]*/
+/*[clinic end generated code: output=8faffe07fe1f862a input=d0eb2a69fab3b9f1]*/
{
const char *s;
Py_ssize_t slen;
diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c
index c539787e5829dd..538a7e85bc950c 100644
--- a/Modules/signalmodule.c
+++ b/Modules/signalmodule.c
@@ -13,7 +13,7 @@
#include "pycore_moduleobject.h" // _PyModule_GetState()
#include "pycore_pyerrors.h" // _PyErr_SetString()
#include "pycore_pystate.h" // _PyThreadState_GET()
-#include "pycore_signal.h" // Py_NSIG
+#include "pycore_signal.h"
#ifndef MS_WINDOWS
# include "posixmodule.h"
@@ -23,12 +23,13 @@
#endif
#ifdef MS_WINDOWS
-# include
# ifdef HAVE_PROCESS_H
# include
# endif
#endif
+#include "pycore_signal.h" // Py_NSIG
+
#ifdef HAVE_SIGNAL_H
# include <signal.h>
#endif
@@ -100,47 +101,13 @@ class sigset_t_converter(CConverter):
may not be the thread that received the signal.
*/
-static volatile struct {
- _Py_atomic_int tripped;
- /* func is atomic to ensure that PyErr_SetInterrupt is async-signal-safe
- * (even though it would probably be otherwise, anyway).
- */
- _Py_atomic_address func;
-} Handlers[Py_NSIG];
-
-#ifdef MS_WINDOWS
-#define INVALID_FD ((SOCKET_T)-1)
-
-static volatile struct {
- SOCKET_T fd;
- int warn_on_full_buffer;
- int use_send;
-} wakeup = {.fd = INVALID_FD, .warn_on_full_buffer = 1, .use_send = 0};
-#else
-#define INVALID_FD (-1)
-static volatile struct {
-#ifdef __VXWORKS__
- int fd;
-#else
- sig_atomic_t fd;
-#endif
- int warn_on_full_buffer;
-} wakeup = {.fd = INVALID_FD, .warn_on_full_buffer = 1};
-#endif
-
-/* Speed up sigcheck() when none tripped */
-static _Py_atomic_int is_tripped;
-
-typedef struct {
- PyObject *default_handler;
- PyObject *ignore_handler;
-#ifdef MS_WINDOWS
- HANDLE sigint_event;
-#endif
-} signal_state_t;
+#define Handlers _PyRuntime.signals.handlers
+#define wakeup _PyRuntime.signals.wakeup
+#define is_tripped _PyRuntime.signals.is_tripped
// State shared by all Python interpreters
-static signal_state_t signal_global_state = {0};
+typedef struct _signals_runtime_state signal_state_t;
+#define signal_global_state _PyRuntime.signals
#if defined(HAVE_GETITIMER) || defined(HAVE_SETITIMER)
# define PYHAVE_ITIMER_ERROR
@@ -331,13 +298,7 @@ trip_signal(int sig_num)
See bpo-30038 for more details.
*/
- int fd;
-#ifdef MS_WINDOWS
- fd = Py_SAFE_DOWNCAST(wakeup.fd, SOCKET_T, int);
-#else
- fd = wakeup.fd;
-#endif
-
+ int fd = wakeup.fd;
if (fd != INVALID_FD) {
unsigned char byte = (unsigned char)sig_num;
#ifdef MS_WINDOWS
@@ -407,7 +368,7 @@ signal_handler(int sig_num)
#ifdef MS_WINDOWS
if (sig_num == SIGINT) {
signal_state_t *state = &signal_global_state;
- SetEvent(state->sigint_event);
+ SetEvent((HANDLE)state->sigint_event);
}
#endif
}
@@ -822,7 +783,7 @@ signal_set_wakeup_fd(PyObject *self, PyObject *args, PyObject *kwds)
}
old_sockfd = wakeup.fd;
- wakeup.fd = sockfd;
+ wakeup.fd = Py_SAFE_DOWNCAST(sockfd, SOCKET_T, int);
wakeup.warn_on_full_buffer = warn_on_full_buffer;
wakeup.use_send = is_socket;
@@ -873,11 +834,7 @@ PySignal_SetWakeupFd(int fd)
fd = -1;
}
-#ifdef MS_WINDOWS
- int old_fd = Py_SAFE_DOWNCAST(wakeup.fd, SOCKET_T, int);
-#else
int old_fd = wakeup.fd;
-#endif
wakeup.fd = fd;
wakeup.warn_on_full_buffer = 1;
return old_fd;
@@ -1654,6 +1611,8 @@ signal_module_exec(PyObject *m)
signal_state_t *state = &signal_global_state;
_signal_module_state *modstate = get_signal_state(m);
+ // XXX For proper isolation, these values must be guaranteed
+ // to be effectively const (e.g. immortal).
modstate->default_handler = state->default_handler; // borrowed ref
modstate->ignore_handler = state->ignore_handler; // borrowed ref
@@ -1783,7 +1742,7 @@ _PySignal_Fini(void)
#ifdef MS_WINDOWS
if (state->sigint_event != NULL) {
- CloseHandle(state->sigint_event);
+ CloseHandle((HANDLE)state->sigint_event);
state->sigint_event = NULL;
}
#endif
@@ -2009,7 +1968,7 @@ _PySignal_Init(int install_signal_handlers)
#ifdef MS_WINDOWS
/* Create manual-reset event, initially unset */
- state->sigint_event = CreateEvent(NULL, TRUE, FALSE, FALSE);
+ state->sigint_event = (void *)CreateEvent(NULL, TRUE, FALSE, FALSE);
if (state->sigint_event == NULL) {
PyErr_SetFromWindowsErr(0);
return -1;
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c
index 4ecb88cef7224e..2c59c2f2c89b25 100644
--- a/Modules/socketmodule.c
+++ b/Modules/socketmodule.c
@@ -2928,8 +2928,8 @@ sock_setblocking(PySocketSockObject *s, PyObject *arg)
{
long block;
- block = PyLong_AsLong(arg);
- if (block == -1 && PyErr_Occurred())
+ block = PyObject_IsTrue(arg);
+ if (block < 0)
return NULL;
s->sock_timeout = _PyTime_FromSeconds(block ? -1 : 0);
@@ -8484,18 +8484,78 @@ PyInit__socket(void)
#ifdef TCP_QUICKACK
PyModule_AddIntMacro(m, TCP_QUICKACK);
#endif
-#ifdef TCP_FASTOPEN
- PyModule_AddIntMacro(m, TCP_FASTOPEN);
-#endif
#ifdef TCP_CONGESTION
PyModule_AddIntMacro(m, TCP_CONGESTION);
#endif
+#ifdef TCP_MD5SIG
+ PyModule_AddIntMacro(m, TCP_MD5SIG);
+#endif
+#ifdef TCP_THIN_LINEAR_TIMEOUTS
+ PyModule_AddIntMacro(m, TCP_THIN_LINEAR_TIMEOUTS);
+#endif
+#ifdef TCP_THIN_DUPACK
+ PyModule_AddIntMacro(m, TCP_THIN_DUPACK);
+#endif
#ifdef TCP_USER_TIMEOUT
PyModule_AddIntMacro(m, TCP_USER_TIMEOUT);
#endif
+#ifdef TCP_REPAIR
+ PyModule_AddIntMacro(m, TCP_REPAIR);
+#endif
+#ifdef TCP_REPAIR_QUEUE
+ PyModule_AddIntMacro(m, TCP_REPAIR_QUEUE);
+#endif
+#ifdef TCP_QUEUE_SEQ
+ PyModule_AddIntMacro(m, TCP_QUEUE_SEQ);
+#endif
+#ifdef TCP_REPAIR_OPTIONS
+ PyModule_AddIntMacro(m, TCP_REPAIR_OPTIONS);
+#endif
+#ifdef TCP_FASTOPEN
+ PyModule_AddIntMacro(m, TCP_FASTOPEN);
+#endif
+#ifdef TCP_TIMESTAMP
+ PyModule_AddIntMacro(m, TCP_TIMESTAMP);
+#endif
#ifdef TCP_NOTSENT_LOWAT
PyModule_AddIntMacro(m, TCP_NOTSENT_LOWAT);
#endif
+#ifdef TCP_CC_INFO
+ PyModule_AddIntMacro(m, TCP_CC_INFO);
+#endif
+#ifdef TCP_SAVE_SYN
+ PyModule_AddIntMacro(m, TCP_SAVE_SYN);
+#endif
+#ifdef TCP_SAVED_SYN
+ PyModule_AddIntMacro(m, TCP_SAVED_SYN);
+#endif
+#ifdef TCP_REPAIR_WINDOW
+ PyModule_AddIntMacro(m, TCP_REPAIR_WINDOW);
+#endif
+#ifdef TCP_FASTOPEN_CONNECT
+ PyModule_AddIntMacro(m, TCP_FASTOPEN_CONNECT);
+#endif
+#ifdef TCP_ULP
+ PyModule_AddIntMacro(m, TCP_ULP);
+#endif
+#ifdef TCP_MD5SIG_EXT
+ PyModule_AddIntMacro(m, TCP_MD5SIG_EXT);
+#endif
+#ifdef TCP_FASTOPEN_KEY
+ PyModule_AddIntMacro(m, TCP_FASTOPEN_KEY);
+#endif
+#ifdef TCP_FASTOPEN_NO_COOKIE
+ PyModule_AddIntMacro(m, TCP_FASTOPEN_NO_COOKIE);
+#endif
+#ifdef TCP_ZEROCOPY_RECEIVE
+ PyModule_AddIntMacro(m, TCP_ZEROCOPY_RECEIVE);
+#endif
+#ifdef TCP_INQ
+ PyModule_AddIntMacro(m, TCP_INQ);
+#endif
+#ifdef TCP_TX_DELAY
+ PyModule_AddIntMacro(m, TCP_TX_DELAY);
+#endif
/* IPX options */
#ifdef IPX_TYPE
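
The new `TCP_*` constants are only exported when the platform defines them, so portable code should probe for them; `setblocking()` now also routes its argument through `PyObject_IsTrue()`:

    import socket

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setblocking(False)                     # any truthy/falsy value is accepted
    if hasattr(socket, "TCP_FASTOPEN_CONNECT"):
        s.setsockopt(socket.IPPROTO_TCP, socket.TCP_FASTOPEN_CONNECT, 1)
    s.close()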
diff --git a/Modules/timemodule.c b/Modules/timemodule.c
index 11c888af03e82d..c2bacaae0c0339 100644
--- a/Modules/timemodule.c
+++ b/Modules/timemodule.c
@@ -62,6 +62,56 @@
#define SEC_TO_NS (1000 * 1000 * 1000)
+#if defined(HAVE_TIMES) || defined(HAVE_CLOCK)
+static int
+check_ticks_per_second(long tps, const char *context)
+{
+ /* Effectively, check that _PyTime_MulDiv(t, SEC_TO_NS, ticks_per_second)
+ cannot overflow. */
+ if (tps >= 0 && (_PyTime_t)tps > _PyTime_MAX / SEC_TO_NS) {
+ PyErr_Format(PyExc_OverflowError, "%s is too large", context);
+ return -1;
+ }
+ return 0;
+}
+#endif /* HAVE_TIMES || HAVE_CLOCK */
+
+#ifdef HAVE_TIMES
+
+# define ticks_per_second _PyRuntime.time.ticks_per_second
+
+static void
+ensure_ticks_per_second(void)
+{
+ if (_PyRuntime.time.ticks_per_second_initialized) {
+ return;
+ }
+ _PyRuntime.time.ticks_per_second_initialized = 1;
+# if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK)
+ ticks_per_second = sysconf(_SC_CLK_TCK);
+ if (ticks_per_second < 1) {
+ ticks_per_second = -1;
+ }
+# elif defined(HZ)
+ ticks_per_second = HZ;
+# else
+ ticks_per_second = 60; /* magic fallback value; may be bogus */
+# endif
+}
+
+#endif /* HAVE_TIMES */
+
+
+PyStatus
+_PyTime_Init(void)
+{
+#ifdef HAVE_TIMES
+ ensure_ticks_per_second();
+#endif
+ return PyStatus_Ok();
+}
+
+
/* Forward declarations */
static int pysleep(_PyTime_t timeout);
@@ -140,18 +190,8 @@ Return the current time in nanoseconds since the Epoch.");
static int
_PyTime_GetClockWithInfo(_PyTime_t *tp, _Py_clock_info_t *info)
{
- static int initialized = 0;
-
- if (!initialized) {
- initialized = 1;
-
- /* Make sure that _PyTime_MulDiv(ticks, SEC_TO_NS, CLOCKS_PER_SEC)
- above cannot overflow */
- if ((_PyTime_t)CLOCKS_PER_SEC > _PyTime_MAX / SEC_TO_NS) {
- PyErr_SetString(PyExc_OverflowError,
- "CLOCKS_PER_SEC is too large");
- return -1;
- }
+ if (check_ticks_per_second(CLOCKS_PER_SEC, "CLOCKS_PER_SEC") < 0) {
+ return -1;
}
if (info) {
@@ -1308,36 +1348,10 @@ _PyTime_GetProcessTimeWithInfo(_PyTime_t *tp, _Py_clock_info_t *info)
struct tms t;
if (times(&t) != (clock_t)-1) {
- static long ticks_per_second = -1;
-
- if (ticks_per_second == -1) {
- long freq;
-#if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK)
- freq = sysconf(_SC_CLK_TCK);
- if (freq < 1) {
- freq = -1;
- }
-#elif defined(HZ)
- freq = HZ;
-#else
- freq = 60; /* magic fallback value; may be bogus */
-#endif
-
- if (freq != -1) {
- /* check that _PyTime_MulDiv(t, SEC_TO_NS, ticks_per_second)
- cannot overflow below */
-#if LONG_MAX > _PyTime_MAX / SEC_TO_NS
- if ((_PyTime_t)freq > _PyTime_MAX / SEC_TO_NS) {
- PyErr_SetString(PyExc_OverflowError,
- "_SC_CLK_TCK is too large");
- return -1;
- }
-#endif
-
- ticks_per_second = freq;
- }
+ assert(_PyRuntime.time.ticks_per_second_initialized);
+ if (check_ticks_per_second(ticks_per_second, "_SC_CLK_TCK") < 0) {
+ return -1;
}
-
if (ticks_per_second != -1) {
if (info) {
info->implementation = "times()";
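
check_ticks_per_second() centralizes the overflow guard that the two call sites above previously duplicated: it rejects a ticks-per-second value so large that scaling a tick count to nanoseconds could overflow. A standalone sketch of the same guard with illustrative names and plain C99 types:

#include <stdint.h>

#define NS_PER_SEC (1000 * 1000 * 1000)

/* Return 1 if tps is safe to use when converting ticks to nanoseconds,
   0 if tps * NS_PER_SEC would exceed INT64_MAX.  The check is written
   as a division so it cannot itself overflow. */
static int
ticks_rate_ok(long tps)
{
    if (tps >= 0 && (int64_t)tps > INT64_MAX / NS_PER_SEC) {
        return 0;
    }
    return 1;
}
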
diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c
index f24690a02bda26..072089e39aa207 100644
--- a/Objects/bytearrayobject.c
+++ b/Objects/bytearrayobject.c
@@ -2012,7 +2012,7 @@ bytearray_join(PyByteArrayObject *self, PyObject *iterable_of_bytes)
/*[clinic input]
bytearray.splitlines
- keepends: bool(accept={int}) = False
+ keepends: bool = False
Return a list of the lines in the bytearray, breaking at line boundaries.
@@ -2022,7 +2022,7 @@ true.
static PyObject *
bytearray_splitlines_impl(PyByteArrayObject *self, int keepends)
-/*[clinic end generated code: output=4223c94b895f6ad9 input=99a27ad959b9cf6b]*/
+/*[clinic end generated code: output=4223c94b895f6ad9 input=66b2dcdea8d093bf]*/
{
return stringlib_splitlines(
(PyObject*) self, PyByteArray_AS_STRING(self),
diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c
index a63f396e022f71..0fd10fa00d16fa 100644
--- a/Objects/bytesobject.c
+++ b/Objects/bytesobject.c
@@ -2314,7 +2314,7 @@ bytes_decode_impl(PyBytesObject *self, const char *encoding,
/*[clinic input]
bytes.splitlines
- keepends: bool(accept={int}) = False
+ keepends: bool = False
Return a list of the lines in the bytes, breaking at line boundaries.
@@ -2324,7 +2324,7 @@ true.
static PyObject *
bytes_splitlines_impl(PyBytesObject *self, int keepends)
-/*[clinic end generated code: output=3484149a5d880ffb input=a8b32eb01ff5a5ed]*/
+/*[clinic end generated code: output=3484149a5d880ffb input=5d7b898af2fe55c0]*/
{
return stringlib_splitlines(
(PyObject*) self, PyBytes_AS_STRING(self),
diff --git a/Objects/clinic/bytearrayobject.c.h b/Objects/clinic/bytearrayobject.c.h
index 142f2998160725..e7bf3183af8522 100644
--- a/Objects/clinic/bytearrayobject.c.h
+++ b/Objects/clinic/bytearrayobject.c.h
@@ -1084,8 +1084,8 @@ bytearray_splitlines(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t
if (!noptargs) {
goto skip_optional_pos;
}
- keepends = _PyLong_AsInt(args[0]);
- if (keepends == -1 && PyErr_Occurred()) {
+ keepends = PyObject_IsTrue(args[0]);
+ if (keepends < 0) {
goto exit;
}
skip_optional_pos:
@@ -1287,4 +1287,4 @@ bytearray_sizeof(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored))
{
return bytearray_sizeof_impl(self);
}
-/*[clinic end generated code: output=72bfa6cac2fd6832 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=022698e8b0faa272 input=a9049054013a1b77]*/
diff --git a/Objects/clinic/bytesobject.c.h b/Objects/clinic/bytesobject.c.h
index 904124ec479abb..060056dafbd84f 100644
--- a/Objects/clinic/bytesobject.c.h
+++ b/Objects/clinic/bytesobject.c.h
@@ -839,8 +839,8 @@ bytes_splitlines(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, P
if (!noptargs) {
goto skip_optional_pos;
}
- keepends = _PyLong_AsInt(args[0]);
- if (keepends == -1 && PyErr_Occurred()) {
+ keepends = PyObject_IsTrue(args[0]);
+ if (keepends < 0) {
goto exit;
}
skip_optional_pos:
@@ -1063,4 +1063,4 @@ bytes_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=5e0a25b7ba749a04 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=31a9e4af85562612 input=a9049054013a1b77]*/
diff --git a/Objects/clinic/listobject.c.h b/Objects/clinic/listobject.c.h
index 926eaa5d36983b..94852e99617060 100644
--- a/Objects/clinic/listobject.c.h
+++ b/Objects/clinic/listobject.c.h
@@ -215,8 +215,8 @@ list_sort(PyListObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_kwonly;
}
}
- reverse = _PyLong_AsInt(args[1]);
- if (reverse == -1 && PyErr_Occurred()) {
+ reverse = PyObject_IsTrue(args[1]);
+ if (reverse < 0) {
goto exit;
}
skip_optional_kwonly:
@@ -382,4 +382,4 @@ list___reversed__(PyListObject *self, PyObject *Py_UNUSED(ignored))
{
return list___reversed___impl(self);
}
-/*[clinic end generated code: output=782ed6c68b1c9f83 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=4e6f38b655394564 input=a9049054013a1b77]*/
diff --git a/Objects/clinic/unicodeobject.c.h b/Objects/clinic/unicodeobject.c.h
index d803a2733bd636..f640c997577363 100644
--- a/Objects/clinic/unicodeobject.c.h
+++ b/Objects/clinic/unicodeobject.c.h
@@ -1193,8 +1193,8 @@ unicode_splitlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyOb
if (!noptargs) {
goto skip_optional_pos;
}
- keepends = _PyLong_AsInt(args[0]);
- if (keepends == -1 && PyErr_Occurred()) {
+ keepends = PyObject_IsTrue(args[0]);
+ if (keepends < 0) {
goto exit;
}
skip_optional_pos:
@@ -1497,4 +1497,4 @@ unicode_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=e775ff4154f1c935 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=05d942840635dadf input=a9049054013a1b77]*/
diff --git a/Objects/codeobject.c b/Objects/codeobject.c
index f5d90cf65fcec3..f455cc603aae9c 100644
--- a/Objects/codeobject.c
+++ b/Objects/codeobject.c
@@ -11,6 +11,65 @@
#include "pycore_tuple.h" // _PyTuple_ITEMS()
#include "clinic/codeobject.c.h"
+static void
+notify_code_watchers(PyCodeEvent event, PyCodeObject *co)
+{
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ if (interp->active_code_watchers) {
+ assert(interp->_initialized);
+ for (int i = 0; i < CODE_MAX_WATCHERS; i++) {
+ PyCode_WatchCallback cb = interp->code_watchers[i];
+ if ((cb != NULL) && (cb(event, co) < 0)) {
+ PyErr_WriteUnraisable((PyObject *) co);
+ }
+ }
+ }
+}
+
+int
+PyCode_AddWatcher(PyCode_WatchCallback callback)
+{
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ assert(interp->_initialized);
+
+ for (int i = 0; i < CODE_MAX_WATCHERS; i++) {
+ if (!interp->code_watchers[i]) {
+ interp->code_watchers[i] = callback;
+ interp->active_code_watchers |= (1 << i);
+ return i;
+ }
+ }
+
+ PyErr_SetString(PyExc_RuntimeError, "no more code watcher IDs available");
+ return -1;
+}
+
+static inline int
+validate_watcher_id(PyInterpreterState *interp, int watcher_id)
+{
+ if (watcher_id < 0 || watcher_id >= CODE_MAX_WATCHERS) {
+ PyErr_Format(PyExc_ValueError, "Invalid code watcher ID %d", watcher_id);
+ return -1;
+ }
+ if (!interp->code_watchers[watcher_id]) {
+ PyErr_Format(PyExc_ValueError, "No code watcher set for ID %d", watcher_id);
+ return -1;
+ }
+ return 0;
+}
+
+int
+PyCode_ClearWatcher(int watcher_id)
+{
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ assert(interp->_initialized);
+ if (validate_watcher_id(interp, watcher_id) < 0) {
+ return -1;
+ }
+ interp->code_watchers[watcher_id] = NULL;
+ interp->active_code_watchers &= ~(1 << watcher_id);
+ return 0;
+}
/******************
* generic helpers
@@ -338,7 +397,10 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con)
co->co_nplaincellvars = nplaincellvars;
co->co_ncellvars = ncellvars;
co->co_nfreevars = nfreevars;
-
+ co->co_version = _Py_next_func_version;
+ if (_Py_next_func_version != 0) {
+ _Py_next_func_version++;
+ }
/* not set */
co->co_weakreflist = NULL;
co->co_extra = NULL;
@@ -355,6 +417,7 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con)
}
co->_co_firsttraceable = entry_point;
_PyCode_Quicken(co);
+ notify_code_watchers(PY_CODE_EVENT_CREATE, co);
}
static int
@@ -1457,9 +1520,10 @@ deopt_code(_Py_CODEUNIT *instructions, Py_ssize_t len)
_Py_CODEUNIT instruction = instructions[i];
int opcode = _PyOpcode_Deopt[_Py_OPCODE(instruction)];
int caches = _PyOpcode_Caches[opcode];
- instructions[i] = _Py_MAKECODEUNIT(opcode, _Py_OPARG(instruction));
+ instructions[i].opcode = opcode;
while (caches--) {
- instructions[++i] = _Py_MAKECODEUNIT(CACHE, 0);
+ instructions[++i].opcode = CACHE;
+ instructions[i].oparg = 0;
}
}
}
@@ -1615,6 +1679,8 @@ code_new_impl(PyTypeObject *type, int argcount, int posonlyargcount,
static void
code_dealloc(PyCodeObject *co)
{
+ notify_code_watchers(PY_CODE_EVENT_DESTROY, co);
+
if (co->co_extra != NULL) {
PyInterpreterState *interp = _PyInterpreterState_GET();
_PyCodeObjectExtra *co_extra = co->co_extra;
@@ -1710,9 +1776,9 @@ code_richcompare(PyObject *self, PyObject *other, int op)
for (int i = 0; i < Py_SIZE(co); i++) {
_Py_CODEUNIT co_instr = _PyCode_CODE(co)[i];
_Py_CODEUNIT cp_instr = _PyCode_CODE(cp)[i];
- _Py_SET_OPCODE(co_instr, _PyOpcode_Deopt[_Py_OPCODE(co_instr)]);
- _Py_SET_OPCODE(cp_instr, _PyOpcode_Deopt[_Py_OPCODE(cp_instr)]);
- eq = co_instr == cp_instr;
+ co_instr.opcode = _PyOpcode_Deopt[_Py_OPCODE(co_instr)];
+            cp_instr.opcode = _PyOpcode_Deopt[_Py_OPCODE(cp_instr)];
+ eq = co_instr.cache == cp_instr.cache;
if (!eq) {
goto unequal;
}
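
For context, a minimal sketch of how an embedder or extension might use the watcher API implemented above; the callback body and the install/remove helpers are hypothetical, not part of the patch:

#include <Python.h>
#include <stdio.h>

static int
log_code_event(PyCodeEvent event, PyCodeObject *co)
{
    const char *what = (event == PY_CODE_EVENT_CREATE) ? "create" : "destroy";
    fprintf(stderr, "code %s: %p\n", what, (void *)co);
    return 0;   /* on failure: set an exception and return -1 */
}

static int watcher_id = -1;

static int
install_code_watcher(void)
{
    watcher_id = PyCode_AddWatcher(log_code_event);
    return watcher_id;   /* -1 with an exception set if no ID is free */
}

static void
remove_code_watcher(void)
{
    if (watcher_id >= 0) {
        (void)PyCode_ClearWatcher(watcher_id);
        watcher_id = -1;
    }
}
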
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index bf97edc53ad7d9..9df06520586ab7 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -3,7 +3,7 @@
#include "Python.h"
#include "pycore_ceval.h" // _PyEval_BuiltinsFromGlobals()
-#include "pycore_function.h" // FUNC_MAX_WATCHERS
+#include "pycore_code.h" // _Py_next_func_version
#include "pycore_object.h" // _PyObject_GC_UNTRACK()
#include "pycore_pyerrors.h" // _PyErr_Occurred()
#include "structmember.h" // PyMemberDef
@@ -64,7 +64,6 @@ PyFunction_ClearWatcher(int watcher_id)
interp->active_func_watchers &= ~(1 << watcher_id);
return 0;
}
-
PyFunctionObject *
_PyFunction_FromConstructor(PyFrameConstructor *constr)
{
diff --git a/Objects/listobject.c b/Objects/listobject.c
index 0e696fbffb3f53..1d32915b17a14b 100644
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -2227,7 +2227,7 @@ list.sort
*
key as keyfunc: object = None
- reverse: bool(accept={int}) = False
+ reverse: bool = False
Sort the list in ascending order and return None.
@@ -2242,7 +2242,7 @@ The reverse flag can be set to sort in descending order.
static PyObject *
list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse)
-/*[clinic end generated code: output=57b9f9c5e23fbe42 input=cb56cd179a713060]*/
+/*[clinic end generated code: output=57b9f9c5e23fbe42 input=a74c4cd3ec6b5c08]*/
{
MergeState ms;
Py_ssize_t nremaining;
diff --git a/Objects/longobject.c b/Objects/longobject.c
index c84b4d3f316d5d..8596ce9797b5a6 100644
--- a/Objects/longobject.c
+++ b/Objects/longobject.c
@@ -36,8 +36,8 @@ medium_value(PyLongObject *x)
#define IS_SMALL_INT(ival) (-_PY_NSMALLNEGINTS <= (ival) && (ival) < _PY_NSMALLPOSINTS)
#define IS_SMALL_UINT(ival) ((ival) < _PY_NSMALLPOSINTS)
-#define _MAX_STR_DIGITS_ERROR_FMT_TO_INT "Exceeds the limit (%d) for integer string conversion: value has %zd digits; use sys.set_int_max_str_digits() to increase the limit"
-#define _MAX_STR_DIGITS_ERROR_FMT_TO_STR "Exceeds the limit (%d) for integer string conversion; use sys.set_int_max_str_digits() to increase the limit"
+#define _MAX_STR_DIGITS_ERROR_FMT_TO_INT "Exceeds the limit (%d digits) for integer string conversion: value has %zd digits; use sys.set_int_max_str_digits() to increase the limit"
+#define _MAX_STR_DIGITS_ERROR_FMT_TO_STR "Exceeds the limit (%d digits) for integer string conversion; use sys.set_int_max_str_digits() to increase the limit"
/* If defined, use algorithms from the _pylong.py module */
#define WITH_PYLONG_MODULE 1
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
index 4c08bc214cd27a..276c5a276c06e6 100644
--- a/Objects/obmalloc.c
+++ b/Objects/obmalloc.c
@@ -908,11 +908,12 @@ new_arena(void)
struct arena_object* arenaobj;
uint excess; /* number of bytes above pool alignment */
void *address;
- static int debug_stats = -1;
+ int debug_stats = _PyRuntime.obmalloc.dump_debug_stats;
if (debug_stats == -1) {
const char *opt = Py_GETENV("PYTHONMALLOCSTATS");
debug_stats = (opt != NULL && *opt != '\0');
+ _PyRuntime.obmalloc.dump_debug_stats = debug_stats;
}
if (debug_stats) {
_PyObject_DebugMallocStats(stderr);
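
The hunk above replaces a function-local static with a field read from _PyRuntime, so the PYTHONMALLOCSTATS lookup is cached in runtime state rather than in the function itself. A generic sketch of that caching pattern with made-up names:

#include <stdlib.h>

/* Hypothetical runtime struct: -1 means "not determined yet". */
struct demo_runtime {
    int dump_debug_stats;
};

static int
debug_stats_enabled(struct demo_runtime *rt)
{
    int flag = rt->dump_debug_stats;
    if (flag == -1) {
        const char *opt = getenv("PYTHONMALLOCSTATS");
        flag = (opt != NULL && *opt != '\0');
        rt->dump_debug_stats = flag;   /* cache for later calls */
    }
    return flag;
}
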
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
index 4405125d45e7cc..e1b9953226c0d7 100644
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -995,14 +995,9 @@ _PyTuple_ClearFreeList(PyInterpreterState *interp)
/*********************** Tuple Iterator **************************/
-typedef struct {
- PyObject_HEAD
- Py_ssize_t it_index;
- PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
-} tupleiterobject;
static void
-tupleiter_dealloc(tupleiterobject *it)
+tupleiter_dealloc(_PyTupleIterObject *it)
{
_PyObject_GC_UNTRACK(it);
Py_XDECREF(it->it_seq);
@@ -1010,14 +1005,14 @@ tupleiter_dealloc(tupleiterobject *it)
}
static int
-tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
+tupleiter_traverse(_PyTupleIterObject *it, visitproc visit, void *arg)
{
Py_VISIT(it->it_seq);
return 0;
}
static PyObject *
-tupleiter_next(tupleiterobject *it)
+tupleiter_next(_PyTupleIterObject *it)
{
PyTupleObject *seq;
PyObject *item;
@@ -1040,7 +1035,7 @@ tupleiter_next(tupleiterobject *it)
}
static PyObject *
-tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
+tupleiter_len(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored))
{
Py_ssize_t len = 0;
if (it->it_seq)
@@ -1051,7 +1046,7 @@ tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
static PyObject *
-tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
+tupleiter_reduce(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored))
{
if (it->it_seq)
return Py_BuildValue("N(O)n", _PyEval_GetBuiltin(&_Py_ID(iter)),
@@ -1061,7 +1056,7 @@ tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
}
static PyObject *
-tupleiter_setstate(tupleiterobject *it, PyObject *state)
+tupleiter_setstate(_PyTupleIterObject *it, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
@@ -1089,7 +1084,7 @@ static PyMethodDef tupleiter_methods[] = {
PyTypeObject PyTupleIter_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
"tuple_iterator", /* tp_name */
- sizeof(tupleiterobject), /* tp_basicsize */
+ sizeof(_PyTupleIterObject), /* tp_basicsize */
0, /* tp_itemsize */
/* methods */
(destructor)tupleiter_dealloc, /* tp_dealloc */
@@ -1122,13 +1117,13 @@ PyTypeObject PyTupleIter_Type = {
static PyObject *
tuple_iter(PyObject *seq)
{
- tupleiterobject *it;
+ _PyTupleIterObject *it;
if (!PyTuple_Check(seq)) {
PyErr_BadInternalCall();
return NULL;
}
- it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
+ it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type);
if (it == NULL)
return NULL;
it->it_index = 0;
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index 19bde13a6f238a..b721ccd805edf1 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -5697,8 +5697,6 @@ PyUnicode_AsUTF16String(PyObject *unicode)
/* --- Unicode Escape Codec ----------------------------------------------- */
-static _PyUnicode_Name_CAPI *ucnhash_capi = NULL;
-
PyObject *
_PyUnicode_DecodeUnicodeEscapeInternal(const char *s,
Py_ssize_t size,
@@ -5711,6 +5709,8 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s,
const char *end;
PyObject *errorHandler = NULL;
PyObject *exc = NULL;
+ _PyUnicode_Name_CAPI *ucnhash_capi;
+ PyInterpreterState *interp = _PyInterpreterState_Get();
// so we can remember if we've seen an invalid escape char or not
*first_invalid_escape = NULL;
@@ -5858,6 +5858,7 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s,
/* \N{name} */
case 'N':
+ ucnhash_capi = interp->unicode.ucnhash_capi;
if (ucnhash_capi == NULL) {
/* load the unicode data module */
ucnhash_capi = (_PyUnicode_Name_CAPI *)PyCapsule_Import(
@@ -5869,6 +5870,7 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s,
);
goto onError;
}
+ interp->unicode.ucnhash_capi = ucnhash_capi;
}
message = "malformed \\N character escape";
@@ -12444,7 +12446,7 @@ unicode_rsplit_impl(PyObject *self, PyObject *sep, Py_ssize_t maxsplit)
/*[clinic input]
str.splitlines as unicode_splitlines
- keepends: bool(accept={int}) = False
+ keepends: bool = False
Return a list of the lines in the string, breaking at line boundaries.
@@ -12454,7 +12456,7 @@ true.
static PyObject *
unicode_splitlines_impl(PyObject *self, int keepends)
-/*[clinic end generated code: output=f664dcdad153ec40 input=b508e180459bdd8b]*/
+/*[clinic end generated code: output=f664dcdad153ec40 input=ba6ad05ee85d2b55]*/
{
return PyUnicode_Splitlines(self, keepends);
}
@@ -15128,10 +15130,10 @@ _PyUnicode_Fini(PyInterpreterState *interp)
assert(get_interned_dict() == NULL);
// bpo-47182: force a unicodedata CAPI capsule re-import on
// subsequent initialization of main interpreter.
- ucnhash_capi = NULL;
}
_PyUnicode_FiniEncodings(&state->fs_codec);
+ interp->unicode.ucnhash_capi = NULL;
unicode_clear_identifiers(state);
}
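
The unicodeobject.c changes move the cached unicodedata capsule pointer from a file-level static into per-interpreter state, importing it lazily on the first \N{name} escape. A generic, self-contained sketch of that lazy per-state cache (the slot type and capsule-name argument are illustrative):

#include <Python.h>

/* Hypothetical per-interpreter slot standing in for
   interp->unicode.ucnhash_capi. */
typedef struct {
    void *cached_capi;
} demo_interp_slot;

static void *
get_capi(demo_interp_slot *slot, const char *capsule_name)
{
    if (slot->cached_capi == NULL) {
        void *capi = PyCapsule_Import(capsule_name, 1);
        if (capi == NULL) {
            return NULL;            /* exception set by PyCapsule_Import */
        }
        slot->cached_capi = capi;   /* cached for this interpreter only */
    }
    return slot->cached_capi;
}
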
diff --git a/PC/pyconfig.h b/PC/pyconfig.h
index 1a33d4c5a1e4fc..1d8408b363a66a 100644
--- a/PC/pyconfig.h
+++ b/PC/pyconfig.h
@@ -209,6 +209,16 @@ typedef int pid_t;
#endif /* _MSC_VER */
+/* ------------------------------------------------------------------------*/
+/* mingw and mingw-w64 define __MINGW32__ */
+#ifdef __MINGW32__
+
+#ifdef _WIN64
+#define MS_WIN64
+#endif
+
+#endif /* __MINGW32__*/
+
/* ------------------------------------------------------------------------*/
/* egcs/gnu-win32 defines __GNUC__ and _WIN32 */
#if defined(__GNUC__) && defined(_WIN32)
diff --git a/PC/winreg.c b/PC/winreg.c
index df34e8cf5a77a9..63b37be526ab80 100644
--- a/PC/winreg.c
+++ b/PC/winreg.c
@@ -561,42 +561,54 @@ Py2Reg(PyObject *value, DWORD typ, BYTE **retDataBuf, DWORD *retDataSize)
{
Py_ssize_t i,j;
switch (typ) {
- case REG_DWORD:
- if (value != Py_None && !PyLong_Check(value))
- return FALSE;
- *retDataBuf = (BYTE *)PyMem_NEW(DWORD, 1);
- if (*retDataBuf == NULL){
- PyErr_NoMemory();
- return FALSE;
- }
- *retDataSize = sizeof(DWORD);
- if (value == Py_None) {
- DWORD zero = 0;
- memcpy(*retDataBuf, &zero, sizeof(DWORD));
- }
- else {
- DWORD d = PyLong_AsUnsignedLong(value);
+ case REG_DWORD:
+ {
+ if (value != Py_None && !PyLong_Check(value)) {
+ return FALSE;
+ }
+ DWORD d;
+ if (value == Py_None) {
+ d = 0;
+ }
+ else if (PyLong_Check(value)) {
+ d = PyLong_AsUnsignedLong(value);
+ if (d == (DWORD)(-1) && PyErr_Occurred()) {
+ return FALSE;
+ }
+ }
+ *retDataBuf = (BYTE *)PyMem_NEW(DWORD, 1);
+ if (*retDataBuf == NULL) {
+ PyErr_NoMemory();
+ return FALSE;
+ }
memcpy(*retDataBuf, &d, sizeof(DWORD));
+ *retDataSize = sizeof(DWORD);
+ break;
}
- break;
- case REG_QWORD:
- if (value != Py_None && !PyLong_Check(value))
- return FALSE;
- *retDataBuf = (BYTE *)PyMem_NEW(DWORD64, 1);
- if (*retDataBuf == NULL){
- PyErr_NoMemory();
- return FALSE;
- }
- *retDataSize = sizeof(DWORD64);
- if (value == Py_None) {
- DWORD64 zero = 0;
- memcpy(*retDataBuf, &zero, sizeof(DWORD64));
- }
- else {
- DWORD64 d = PyLong_AsUnsignedLongLong(value);
+ case REG_QWORD:
+ {
+ if (value != Py_None && !PyLong_Check(value)) {
+ return FALSE;
+ }
+ DWORD64 d;
+ if (value == Py_None) {
+ d = 0;
+ }
+ else if (PyLong_Check(value)) {
+ d = PyLong_AsUnsignedLongLong(value);
+ if (d == (DWORD64)(-1) && PyErr_Occurred()) {
+ return FALSE;
+ }
+ }
+ *retDataBuf = (BYTE *)PyMem_NEW(DWORD64, 1);
+ if (*retDataBuf == NULL) {
+ PyErr_NoMemory();
+ return FALSE;
+ }
memcpy(*retDataBuf, &d, sizeof(DWORD64));
+ *retDataSize = sizeof(DWORD64);
+ break;
}
- break;
case REG_SZ:
case REG_EXPAND_SZ:
{
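
The rewritten REG_DWORD/REG_QWORD branches convert the Python value and check PyLong_AsUnsignedLong()/PyLong_AsUnsignedLongLong() for errors before allocating the output buffer, so a conversion failure is reported instead of being silently stored. A reduced sketch of the DWORD case (the type check from the real code is omitted; the helper name is made up):

#include <Python.h>
#include <windows.h>
#include <string.h>

static BOOL
dword_from_object(PyObject *value, BYTE **buf, DWORD *size)
{
    DWORD d = 0;
    if (value != Py_None) {
        d = PyLong_AsUnsignedLong(value);      /* convert first ... */
        if (d == (DWORD)-1 && PyErr_Occurred()) {
            return FALSE;                      /* ... fail before allocating */
        }
    }
    *buf = (BYTE *)PyMem_NEW(DWORD, 1);
    if (*buf == NULL) {
        PyErr_NoMemory();
        return FALSE;
    }
    memcpy(*buf, &d, sizeof(DWORD));
    *size = sizeof(DWORD);
    return TRUE;
}
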
diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj
index 8454bd67b1db1b..fce1f670510001 100644
--- a/PCbuild/_freeze_module.vcxproj
+++ b/PCbuild/_freeze_module.vcxproj
@@ -110,6 +110,7 @@
+
diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters
index 6e8498dceb1cfa..dce6278987c5df 100644
--- a/PCbuild/_freeze_module.vcxproj.filters
+++ b/PCbuild/_freeze_module.vcxproj.filters
@@ -367,6 +367,9 @@
Source Files
+
+ Source Files
+
Source Files
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index f62434370cfdf7..bb2aaae3317b02 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -204,6 +204,7 @@
+
@@ -213,6 +214,7 @@
+
@@ -244,6 +246,7 @@
+
@@ -254,8 +257,10 @@
+
+
diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters
index f44a1ad8550a38..339e7cc4937a3d 100644
--- a/PCbuild/pythoncore.vcxproj.filters
+++ b/PCbuild/pythoncore.vcxproj.filters
@@ -519,6 +519,9 @@
Include\internal
+
+ Include\internal
+
Include\internal
@@ -546,6 +549,9 @@
Include\internal
+
+ Include\internal
+
Include\internal
@@ -555,6 +561,9 @@
Include\internal
+
+ Include\internal
+
Include\internal
@@ -636,6 +645,9 @@
Include\internal
+
+ Include\internal
+
Include\internal
@@ -663,12 +675,18 @@
Include\internal
+
+ Include\internal
+
Include\internal
Include\internal
+
+ Include\internal
+
Include\internal
diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c
index 27c093332f6725..f12dad095acaa8 100644
--- a/Parser/action_helpers.c
+++ b/Parser/action_helpers.c
@@ -13,6 +13,7 @@ void *
_PyPegen_dummy_name(Parser *p, ...)
{
// XXX This leaks memory from the initial arena.
+ // Use a statically allocated variable instead of a pointer?
static void *cache = NULL;
if (cache != NULL) {
@@ -1287,4 +1288,4 @@ _PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq
_PyPegen_get_last_comprehension_item(last_comprehension),
"Generator expression must be parenthesized"
);
-}
\ No newline at end of file
+}
diff --git a/Parser/pegen.c b/Parser/pegen.c
index d34a86e9c883de..d84e06861edefc 100644
--- a/Parser/pegen.c
+++ b/Parser/pegen.c
@@ -246,8 +246,8 @@ _PyPegen_fill_token(Parser *p)
// The array counts the number of tokens skipped by memoization,
// indexed by type.
-#define NSTATISTICS 2000
-static long memo_statistics[NSTATISTICS];
+#define NSTATISTICS _PYPEGEN_NSTATISTICS
+#define memo_statistics _PyRuntime.parser.memo_statistics
void
_PyPegen_clear_memo_statistics()
diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c
index 7738cbaf9ef39e..6ea7600119b643 100644
--- a/Parser/pegen_errors.c
+++ b/Parser/pegen_errors.c
@@ -169,6 +169,10 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
for (;;) {
switch (_PyTokenizer_Get(p->tok, &new_token)) {
case ERRORTOKEN:
+ if (PyErr_Occurred()) {
+ ret = -1;
+ goto exit;
+ }
if (p->tok->level != 0) {
int error_lineno = p->tok->parenlinenostack[p->tok->level-1];
if (current_err_line > error_lineno) {
diff --git a/Programs/_bootstrap_python.c b/Programs/_bootstrap_python.c
index bbac0c4e1a8a45..6c388fc7033dd0 100644
--- a/Programs/_bootstrap_python.c
+++ b/Programs/_bootstrap_python.c
@@ -12,8 +12,11 @@
/* Includes for frozen modules: */
#include "Python/frozen_modules/importlib._bootstrap.h"
#include "Python/frozen_modules/importlib._bootstrap_external.h"
+#include "Python/frozen_modules/zipimport.h"
/* End includes */
+uint32_t _Py_next_func_version = 1;
+
/* Empty initializer for deepfrozen modules */
int _Py_Deepfreeze_Init(void)
{
@@ -30,6 +33,7 @@ _Py_Deepfreeze_Fini(void)
static const struct _frozen bootstrap_modules[] = {
{"_frozen_importlib", _Py_M__importlib__bootstrap, (int)sizeof(_Py_M__importlib__bootstrap)},
{"_frozen_importlib_external", _Py_M__importlib__bootstrap_external, (int)sizeof(_Py_M__importlib__bootstrap_external)},
+ {"zipimport", _Py_M__zipimport, (int)sizeof(_Py_M__zipimport)},
{0, 0, 0} /* bootstrap sentinel */
};
static const struct _frozen stdlib_modules[] = {
diff --git a/Programs/_freeze_module.c b/Programs/_freeze_module.c
index 9e2169f32e9211..90fc2dc6e87da8 100644
--- a/Programs/_freeze_module.c
+++ b/Programs/_freeze_module.c
@@ -9,6 +9,7 @@
Keep this file in sync with Programs/_freeze_module.py.
*/
+
#include <Python.h>
#include <marshal.h>
#include "pycore_fileutils.h" // _Py_stat_struct
@@ -22,6 +23,8 @@
#include <unistd.h>
#endif
+uint32_t _Py_next_func_version = 1;
+
/* Empty initializer for deepfrozen modules */
int _Py_Deepfreeze_Init(void)
{
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c
index b3b7e8d6c50530..ff96c25da5ebc6 100644
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -714,7 +714,7 @@ compile as builtin_compile
filename: object(converter="PyUnicode_FSDecoder")
mode: str
flags: int = 0
- dont_inherit: bool(accept={int}) = False
+ dont_inherit: bool = False
optimize: int = -1
*
_feature_version as feature_version: int = -1
@@ -737,7 +737,7 @@ static PyObject *
builtin_compile_impl(PyObject *module, PyObject *source, PyObject *filename,
const char *mode, int flags, int dont_inherit,
int optimize, int feature_version)
-/*[clinic end generated code: output=b0c09c84f116d3d7 input=40171fb92c1d580d]*/
+/*[clinic end generated code: output=b0c09c84f116d3d7 input=cc78e20e7c7682ba]*/
{
PyObject *source_copy;
const char *str;
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 41dd1acc937d71..c56f1d3ef9f498 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -81,8 +81,17 @@ do { \
// Dummy variables for stack effects.
static PyObject *value, *value1, *value2, *left, *right, *res, *sum, *prod, *sub;
static PyObject *container, *start, *stop, *v, *lhs, *rhs;
-static PyObject *list, *tuple, *dict;
-static PyObject *exit_func, *lasti, *val;
+static PyObject *list, *tuple, *dict, *owner;
+static PyObject *exit_func, *lasti, *val, *retval, *obj, *iter;
+static size_t jump;
+// Dummy variables for cache effects
+static _Py_CODEUNIT when_to_jump_mask, invert, counter, index, hint;
+static uint32_t type_version;
+// Dummy opcode names for 'op' opcodes
+#define _COMPARE_OP_FLOAT 1003
+#define _COMPARE_OP_INT 1004
+#define _COMPARE_OP_STR 1005
+#define _JUMP_IF 1006
static PyObject *
dummy_func(
@@ -205,7 +214,7 @@ dummy_func(
};
- inst(BINARY_OP_MULTIPLY_INT, (left, right, unused/1 -- prod)) {
+ inst(BINARY_OP_MULTIPLY_INT, (unused/1, left, right -- prod)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
@@ -216,7 +225,7 @@ dummy_func(
ERROR_IF(prod == NULL, error);
}
- inst(BINARY_OP_MULTIPLY_FLOAT, (left, right, unused/1 -- prod)) {
+ inst(BINARY_OP_MULTIPLY_FLOAT, (unused/1, left, right -- prod)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
@@ -229,7 +238,7 @@ dummy_func(
ERROR_IF(prod == NULL, error);
}
- inst(BINARY_OP_SUBTRACT_INT, (left, right, unused/1 -- sub)) {
+ inst(BINARY_OP_SUBTRACT_INT, (unused/1, left, right -- sub)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
@@ -240,7 +249,7 @@ dummy_func(
ERROR_IF(sub == NULL, error);
}
- inst(BINARY_OP_SUBTRACT_FLOAT, (left, right, unused/1 -- sub)) {
+ inst(BINARY_OP_SUBTRACT_FLOAT, (unused/1, left, right -- sub)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
@@ -252,7 +261,7 @@ dummy_func(
ERROR_IF(sub == NULL, error);
}
- inst(BINARY_OP_ADD_UNICODE, (left, right, unused/1 -- res)) {
+ inst(BINARY_OP_ADD_UNICODE, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
@@ -299,7 +308,7 @@ dummy_func(
JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1);
}
- inst(BINARY_OP_ADD_FLOAT, (left, right, unused/1 -- sum)) {
+ inst(BINARY_OP_ADD_FLOAT, (unused/1, left, right -- sum)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
@@ -312,7 +321,7 @@ dummy_func(
ERROR_IF(sum == NULL, error);
}
- inst(BINARY_OP_ADD_INT, (left, right, unused/1 -- sum)) {
+ inst(BINARY_OP_ADD_INT, (unused/1, left, right -- sum)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
@@ -331,7 +340,7 @@ dummy_func(
BINARY_SUBSCR_TUPLE_INT,
};
- inst(BINARY_SUBSCR, (container, sub, unused/4 -- res)) {
+ inst(BINARY_SUBSCR, (unused/4, container, sub -- res)) {
_PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
@@ -377,7 +386,7 @@ dummy_func(
ERROR_IF(err, error);
}
- inst(BINARY_SUBSCR_LIST_INT, (list, sub, unused/4 -- res)) {
+ inst(BINARY_SUBSCR_LIST_INT, (unused/4, list, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR);
@@ -396,7 +405,7 @@ dummy_func(
Py_DECREF(list);
}
- inst(BINARY_SUBSCR_TUPLE_INT, (tuple, sub, unused/4 -- res)) {
+ inst(BINARY_SUBSCR_TUPLE_INT, (unused/4, tuple, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR);
@@ -415,7 +424,7 @@ dummy_func(
Py_DECREF(tuple);
}
- inst(BINARY_SUBSCR_DICT, (dict, sub, unused/4 -- res)) {
+ inst(BINARY_SUBSCR_DICT, (unused/4, dict, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR);
STAT_INC(BINARY_SUBSCR, hit);
@@ -426,14 +435,14 @@ dummy_func(
}
Py_DECREF(dict);
Py_DECREF(sub);
- ERROR_IF(1, error);
+ ERROR_IF(true, error);
}
Py_INCREF(res); // Do this before DECREF'ing dict, sub
Py_DECREF(dict);
Py_DECREF(sub);
}
- inst(BINARY_SUBSCR_GETITEM, (container, sub, unused/1, type_version/2, func_version/1 -- unused)) {
+ inst(BINARY_SUBSCR_GETITEM, (unused/1, type_version/2, func_version/1, container, sub -- unused)) {
PyTypeObject *tp = Py_TYPE(container);
DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR);
assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE);
@@ -457,52 +466,48 @@ dummy_func(
DISPATCH_INLINED(new_frame);
}
- // stack effect: (__0 -- )
- inst(LIST_APPEND) {
- PyObject *v = POP();
- PyObject *list = PEEK(oparg);
- if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0)
- goto error;
+ // Alternative: (list, unused[oparg], v -- list, unused[oparg])
+ inst(LIST_APPEND, (v --)) {
+ PyObject *list = PEEK(oparg + 1); // +1 to account for v staying on stack
+ ERROR_IF(_PyList_AppendTakeRef((PyListObject *)list, v) < 0, error);
PREDICT(JUMP_BACKWARD);
}
- // stack effect: (__0 -- )
- inst(SET_ADD) {
- PyObject *v = POP();
- PyObject *set = PEEK(oparg);
- int err;
- err = PySet_Add(set, v);
+ // Alternative: (set, unused[oparg], v -- set, unused[oparg])
+ inst(SET_ADD, (v --)) {
+ PyObject *set = PEEK(oparg + 1); // +1 to account for v staying on stack
+ int err = PySet_Add(set, v);
Py_DECREF(v);
- if (err != 0)
- goto error;
+ ERROR_IF(err, error);
PREDICT(JUMP_BACKWARD);
}
- inst(STORE_SUBSCR, (v, container, sub -- )) {
- _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
- if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ family(store_subscr) = {
+ STORE_SUBSCR,
+ STORE_SUBSCR_DICT,
+ STORE_SUBSCR_LIST_INT,
+ };
+
+ inst(STORE_SUBSCR, (counter/1, v, container, sub -- )) {
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
+ _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
Py_DECREF(v);
Py_DECREF(container);
Py_DECREF(sub);
- ERROR_IF(err != 0, error);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ ERROR_IF(err, error);
}
- // stack effect: (__0, __1, __2 -- )
- inst(STORE_SUBSCR_LIST_INT) {
+ inst(STORE_SUBSCR_LIST_INT, (unused/1, value, list, sub -- )) {
assert(cframe.use_tracing == 0);
- PyObject *sub = TOP();
- PyObject *list = SECOND();
- PyObject *value = THIRD();
DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR);
@@ -515,60 +520,42 @@ dummy_func(
PyObject *old_value = PyList_GET_ITEM(list, index);
PyList_SET_ITEM(list, index, value);
- STACK_SHRINK(3);
assert(old_value != NULL);
Py_DECREF(old_value);
_Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free);
Py_DECREF(list);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
}
- // stack effect: (__0, __1, __2 -- )
- inst(STORE_SUBSCR_DICT) {
+ inst(STORE_SUBSCR_DICT, (unused/1, value, dict, sub -- )) {
assert(cframe.use_tracing == 0);
- PyObject *sub = TOP();
- PyObject *dict = SECOND();
- PyObject *value = THIRD();
DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR);
- STACK_SHRINK(3);
STAT_INC(STORE_SUBSCR, hit);
int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value);
Py_DECREF(dict);
- if (err != 0) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ ERROR_IF(err, error);
}
- // stack effect: (__0, __1 -- )
- inst(DELETE_SUBSCR) {
- PyObject *sub = TOP();
- PyObject *container = SECOND();
- int err;
- STACK_SHRINK(2);
+ inst(DELETE_SUBSCR, (container, sub --)) {
/* del container[sub] */
- err = PyObject_DelItem(container, sub);
+ int err = PyObject_DelItem(container, sub);
Py_DECREF(container);
Py_DECREF(sub);
- if (err != 0)
- goto error;
+ ERROR_IF(err, error);
}
- // stack effect: (__0 -- )
- inst(PRINT_EXPR) {
- PyObject *value = POP();
+ inst(PRINT_EXPR, (value --)) {
PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook));
PyObject *res;
+ // Can't use ERROR_IF here.
if (hook == NULL) {
_PyErr_SetString(tstate, PyExc_RuntimeError,
"lost sys.displayhook");
Py_DECREF(value);
- goto error;
+ ERROR_IF(true, error);
}
res = PyObject_CallOneArg(hook, value);
Py_DECREF(value);
- if (res == NULL)
- goto error;
+ ERROR_IF(res == NULL, error);
Py_DECREF(res);
}
@@ -595,11 +582,10 @@ dummy_func(
goto error;
}
- // stack effect: (__0 -- )
- inst(INTERPRETER_EXIT) {
+ inst(INTERPRETER_EXIT, (retval --)) {
assert(frame == &entry_frame);
assert(_PyFrame_IsIncomplete(frame));
- PyObject *retval = POP();
+ STACK_SHRINK(1); // Since we're not going to DISPATCH()
assert(EMPTY());
/* Restore previous cframe and return. */
tstate->cframe = cframe.previous;
@@ -610,62 +596,53 @@ dummy_func(
return retval;
}
- // stack effect: (__0 -- )
- inst(RETURN_VALUE) {
- PyObject *retval = POP();
+ inst(RETURN_VALUE, (retval --)) {
+ STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
- frame = cframe.current_frame = pop_frame(tstate, frame);
+ // GH-99729: We need to unlink the frame *before* clearing it:
+ _PyInterpreterFrame *dying = frame;
+ frame = cframe.current_frame = dying->previous;
+ _PyEvalFrameClearAndPop(tstate, dying);
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
- // stack effect: ( -- )
- inst(GET_AITER) {
+ inst(GET_AITER, (obj -- iter)) {
unaryfunc getter = NULL;
- PyObject *iter = NULL;
- PyObject *obj = TOP();
PyTypeObject *type = Py_TYPE(obj);
if (type->tp_as_async != NULL) {
getter = type->tp_as_async->am_aiter;
}
- if (getter != NULL) {
- iter = (*getter)(obj);
- Py_DECREF(obj);
- if (iter == NULL) {
- SET_TOP(NULL);
- goto error;
- }
- }
- else {
- SET_TOP(NULL);
+ if (getter == NULL) {
_PyErr_Format(tstate, PyExc_TypeError,
"'async for' requires an object with "
"__aiter__ method, got %.100s",
type->tp_name);
Py_DECREF(obj);
- goto error;
+ ERROR_IF(true, error);
}
+ iter = (*getter)(obj);
+ Py_DECREF(obj);
+ ERROR_IF(iter == NULL, error);
+
if (Py_TYPE(iter)->tp_as_async == NULL ||
Py_TYPE(iter)->tp_as_async->am_anext == NULL) {
- SET_TOP(NULL);
_PyErr_Format(tstate, PyExc_TypeError,
"'async for' received an object from __aiter__ "
"that does not implement __anext__: %.100s",
Py_TYPE(iter)->tp_name);
Py_DECREF(iter);
- goto error;
+ ERROR_IF(true, error);
}
-
- SET_TOP(iter);
}
// stack effect: ( -- __0)
@@ -1116,53 +1093,43 @@ dummy_func(
Py_DECREF(seq);
}
- // stack effect: (__0, __1 -- )
- inst(STORE_ATTR) {
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ family(store_attr) = {
+ STORE_ATTR,
+ STORE_ATTR_INSTANCE_VALUE,
+ STORE_ATTR_SLOT,
+ STORE_ATTR_WITH_HINT,
+ };
+
+ inst(STORE_ATTR, (counter/1, unused/3, v, owner --)) {
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyObject *name = GETITEM(names, oparg);
next_instr--;
_Py_Specialize_StoreAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_ATTR, deferred);
+ _PyAttrCache *cache = (_PyAttrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
PyObject *name = GETITEM(names, oparg);
- PyObject *owner = TOP();
- PyObject *v = SECOND();
- int err;
- STACK_SHRINK(2);
- err = PyObject_SetAttr(owner, name, v);
+ int err = PyObject_SetAttr(owner, name, v);
Py_DECREF(v);
Py_DECREF(owner);
- if (err != 0) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ ERROR_IF(err, error);
}
- // stack effect: (__0 -- )
- inst(DELETE_ATTR) {
+ inst(DELETE_ATTR, (owner --)) {
PyObject *name = GETITEM(names, oparg);
- PyObject *owner = POP();
- int err;
- err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
+ int err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
Py_DECREF(owner);
- if (err != 0)
- goto error;
+ ERROR_IF(err, error);
}
- // stack effect: (__0 -- )
- inst(STORE_GLOBAL) {
+ inst(STORE_GLOBAL, (v --)) {
PyObject *name = GETITEM(names, oparg);
- PyObject *v = POP();
- int err;
- err = PyDict_SetItem(GLOBALS(), name, v);
+ int err = PyDict_SetItem(GLOBALS(), name, v);
Py_DECREF(v);
- if (err != 0)
- goto error;
+ ERROR_IF(err, error);
}
inst(DELETE_GLOBAL, (--)) {
@@ -1951,22 +1918,15 @@ dummy_func(
DISPATCH_INLINED(new_frame);
}
- // stack effect: (__0, __1 -- )
- inst(STORE_ATTR_INSTANCE_VALUE) {
+ inst(STORE_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, value, owner --)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner);
DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR);
STAT_INC(STORE_ATTR, hit);
- Py_ssize_t index = cache->index;
- STACK_SHRINK(1);
- PyObject *value = POP();
PyDictValues *values = _PyDictOrValues_GetValues(dorv);
PyObject *old_value = values->values[index];
values->values[index] = value;
@@ -1977,16 +1937,11 @@ dummy_func(
Py_DECREF(old_value);
}
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
}
- // stack effect: (__0, __1 -- )
- inst(STORE_ATTR_WITH_HINT) {
+ inst(STORE_ATTR_WITH_HINT, (unused/1, type_version/2, hint/1, value, owner --)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
@@ -1996,17 +1951,14 @@ dummy_func(
DEOPT_IF(dict == NULL, STORE_ATTR);
assert(PyDict_CheckExact((PyObject *)dict));
PyObject *name = GETITEM(names, oparg);
- uint16_t hint = cache->index;
DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR);
- PyObject *value, *old_value;
+ PyObject *old_value;
uint64_t new_version;
if (DK_IS_UNICODE(dict->ma_keys)) {
PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
DEOPT_IF(ep->me_key != name, STORE_ATTR);
old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
- STACK_SHRINK(1);
- value = POP();
new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value);
ep->me_value = value;
}
@@ -2015,8 +1967,6 @@ dummy_func(
DEOPT_IF(ep->me_key != name, STORE_ATTR);
old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
- STACK_SHRINK(1);
- value = POP();
new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value);
ep->me_value = value;
}
@@ -2029,36 +1979,32 @@ dummy_func(
/* PEP 509 */
dict->ma_version_tag = new_version;
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
}
- // stack effect: (__0, __1 -- )
- inst(STORE_ATTR_SLOT) {
+ inst(STORE_ATTR_SLOT, (unused/1, type_version/2, index/1, value, owner --)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
- char *addr = (char *)owner + cache->index;
+ char *addr = (char *)owner + index;
STAT_INC(STORE_ATTR, hit);
- STACK_SHRINK(1);
- PyObject *value = POP();
PyObject *old_value = *(PyObject **)addr;
*(PyObject **)addr = value;
Py_XDECREF(old_value);
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
}
- // stack effect: (__0 -- )
- inst(COMPARE_OP) {
+ family(compare_op) = {
+ COMPARE_OP,
+ _COMPARE_OP_FLOAT,
+ _COMPARE_OP_INT,
+ _COMPARE_OP_STR,
+ };
+
+ inst(COMPARE_OP, (unused/2, left, right -- res)) {
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
- PyObject *right = TOP();
- PyObject *left = SECOND();
next_instr--;
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
DISPATCH_SAME_OPARG();
@@ -2066,57 +2012,43 @@ dummy_func(
STAT_INC(COMPARE_OP, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
assert(oparg <= Py_GE);
- PyObject *right = POP();
- PyObject *left = TOP();
- PyObject *res = PyObject_RichCompare(left, right, oparg);
- SET_TOP(res);
+ res = PyObject_RichCompare(left, right, oparg);
Py_DECREF(left);
Py_DECREF(right);
- if (res == NULL) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ ERROR_IF(res == NULL, error);
}
- // stack effect: (__0 -- )
- inst(COMPARE_OP_FLOAT_JUMP) {
+ // The result is an int disguised as an object pointer.
+ op(_COMPARE_OP_FLOAT, (unused/1, when_to_jump_mask/1, left, right -- jump: size_t)) {
assert(cframe.use_tracing == 0);
// Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int when_to_jump_mask = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
double dleft = PyFloat_AS_DOUBLE(left);
double dright = PyFloat_AS_DOUBLE(right);
- int sign = (dleft > dright) - (dleft < dright);
+            // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+ int sign_ish = 2*(dleft > dright) + 2 - (dleft < dright);
DEOPT_IF(isnan(dleft), COMPARE_OP);
DEOPT_IF(isnan(dright), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
- NEXTOPARG();
- STACK_SHRINK(2);
_Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
_Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
+ jump = sign_ish & when_to_jump_mask;
+ }
+ // The input is an int disguised as an object pointer!
+ op(_JUMP_IF, (jump: size_t --)) {
assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- int jump = (1 << (sign + 1)) & when_to_jump_mask;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
+ if (jump) {
+ JUMPBY(oparg);
}
}
+        // We're praying that the compiler optimizes the flags manipulations.
+ super(COMPARE_OP_FLOAT_JUMP) = _COMPARE_OP_FLOAT + _JUMP_IF;
- // stack effect: (__0 -- )
- inst(COMPARE_OP_INT_JUMP) {
+ // Similar to COMPARE_OP_FLOAT
+ op(_COMPARE_OP_INT, (unused/1, when_to_jump_mask/1, left, right -- jump: size_t)) {
assert(cframe.use_tracing == 0);
// Combined: COMPARE_OP (int ? int) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int when_to_jump_mask = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP);
@@ -2125,51 +2057,30 @@ dummy_func(
assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1);
Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0];
Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0];
- int sign = (ileft > iright) - (ileft < iright);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
- NEXTOPARG();
- STACK_SHRINK(2);
+            // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+ int sign_ish = 2*(ileft > iright) + 2 - (ileft < iright);
_Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- int jump = (1 << (sign + 1)) & when_to_jump_mask;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
- }
+ jump = sign_ish & when_to_jump_mask;
}
+ super(COMPARE_OP_INT_JUMP) = _COMPARE_OP_INT + _JUMP_IF;
- // stack effect: (__0 -- )
- inst(COMPARE_OP_STR_JUMP) {
+ // Similar to COMPARE_OP_FLOAT, but for ==, != only
+ op(_COMPARE_OP_STR, (unused/1, invert/1, left, right -- jump: size_t)) {
assert(cframe.use_tracing == 0);
// Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int invert = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
int res = _PyUnicode_Equal(left, right);
assert(oparg == Py_EQ || oparg == Py_NE);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
- NEXTOPARG();
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- STACK_SHRINK(2);
_Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
_Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
assert(res == 0 || res == 1);
assert(invert == 0 || invert == 1);
- int jump = res ^ invert;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
- }
+ jump = res ^ invert;
}
+ super(COMPARE_OP_STR_JUMP) = _COMPARE_OP_STR + _JUMP_IF;
// stack effect: (__0 -- )
inst(IS_OP) {
@@ -2612,6 +2523,29 @@ dummy_func(
end_for_iter_list:
}
+ // stack effect: ( -- __0)
+ inst(FOR_ITER_TUPLE) {
+ assert(cframe.use_tracing == 0);
+ _PyTupleIterObject *it = (_PyTupleIterObject *)TOP();
+ DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
+ STAT_INC(FOR_ITER, hit);
+ PyTupleObject *seq = it->it_seq;
+ if (seq) {
+ if (it->it_index < PyTuple_GET_SIZE(seq)) {
+ PyObject *next = PyTuple_GET_ITEM(seq, it->it_index++);
+ PUSH(Py_NewRef(next));
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER);
+ goto end_for_iter_tuple; // End of this instruction
+ }
+ it->it_seq = NULL;
+ Py_DECREF(seq);
+ }
+ STACK_SHRINK(1);
+ Py_DECREF(it);
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1);
+ end_for_iter_tuple:
+ }
+
// stack effect: ( -- __0)
inst(FOR_ITER_RANGE) {
assert(cframe.use_tracing == 0);
@@ -3518,6 +3452,7 @@ dummy_func(
func->func_defaults = POP();
}
+ func->func_version = ((PyCodeObject *)codeobj)->co_version;
PUSH((PyObject *)func);
}
@@ -3630,7 +3565,7 @@ dummy_func(
PUSH(Py_NewRef(peek));
}
- inst(BINARY_OP, (lhs, rhs, unused/1 -- res)) {
+ inst(BINARY_OP, (unused/1, lhs, rhs -- res)) {
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
@@ -3688,9 +3623,6 @@ dummy_func(
// Future families go below this point //
-family(binary_subscr) = {
- BINARY_SUBSCR, BINARY_SUBSCR_DICT,
- BINARY_SUBSCR_GETITEM, BINARY_SUBSCR_LIST_INT, BINARY_SUBSCR_TUPLE_INT };
family(call) = {
CALL, CALL_PY_EXACT_ARGS,
CALL_PY_WITH_DEFAULTS, CALL_BOUND_METHOD_EXACT_ARGS, CALL_BUILTIN_CLASS,
@@ -3699,9 +3631,6 @@ family(call) = {
CALL_NO_KW_LIST_APPEND, CALL_NO_KW_METHOD_DESCRIPTOR_FAST, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS,
CALL_NO_KW_METHOD_DESCRIPTOR_O, CALL_NO_KW_STR_1, CALL_NO_KW_TUPLE_1,
CALL_NO_KW_TYPE_1 };
-family(compare_op) = {
- COMPARE_OP, COMPARE_OP_FLOAT_JUMP,
- COMPARE_OP_INT_JUMP, COMPARE_OP_STR_JUMP };
family(for_iter) = {
FOR_ITER, FOR_ITER_LIST,
FOR_ITER_RANGE };
@@ -3716,13 +3645,7 @@ family(load_fast) = { LOAD_FAST, LOAD_FAST__LOAD_CONST, LOAD_FAST__LOAD_FAST };
family(load_global) = {
LOAD_GLOBAL, LOAD_GLOBAL_BUILTIN,
LOAD_GLOBAL_MODULE };
-family(store_attr) = {
- STORE_ATTR, STORE_ATTR_INSTANCE_VALUE,
- STORE_ATTR_SLOT, STORE_ATTR_WITH_HINT };
family(store_fast) = { STORE_FAST, STORE_FAST__LOAD_FAST, STORE_FAST__STORE_FAST };
-family(store_subscr) = {
- STORE_SUBSCR, STORE_SUBSCR_DICT,
- STORE_SUBSCR_LIST_INT };
family(unpack_sequence) = {
UNPACK_SEQUENCE, UNPACK_SEQUENCE_LIST,
UNPACK_SEQUENCE_TUPLE, UNPACK_SEQUENCE_TWO_TUPLE };
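
Throughout bytecodes.c the instruction signatures now list cache entries (the name/size items such as counter/1 or type_version/2) before the stack inputs, and the specialized compare-and-jump instructions are split into op() parts that super() recombines. A schematic, purely illustrative definition in the same DSL (the EXAMPLE_* names and helper calls are made up, not real instructions):

        // Schematic only: cache entries first, then stack inputs, "--",
        // then stack outputs.
        inst(EXAMPLE_BINARY, (counter/1, version/2, left, right -- res)) {
            DEOPT_IF(!SomeGuard(left), EXAMPLE_BINARY);   // hypothetical guard
            res = CombineValues(left, right);             // hypothetical helper
            Py_DECREF(left);
            Py_DECREF(right);
            ERROR_IF(res == NULL, error);
        }

        // Two op() pieces can be fused into one specialized instruction:
        super(EXAMPLE_SUPER) = _EXAMPLE_COMPARE + _EXAMPLE_JUMP;
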
diff --git a/Python/ceval.c b/Python/ceval.c
index 80bfa21ad0b6f0..45f42800d7ce58 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -864,7 +864,7 @@ GETITEM(PyObject *v, Py_ssize_t i) {
STAT_INC(opcode, miss); \
STAT_INC((INSTNAME), miss); \
/* The counter is always the first cache entry: */ \
- if (ADAPTIVE_COUNTER_IS_ZERO(*next_instr)) { \
+ if (ADAPTIVE_COUNTER_IS_ZERO(next_instr->cache)) { \
STAT_INC((INSTNAME), deopt); \
} \
else { \
@@ -1009,14 +1009,6 @@ trace_function_exit(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject
return 0;
}
-static _PyInterpreterFrame *
-pop_frame(PyThreadState *tstate, _PyInterpreterFrame *frame)
-{
- _PyInterpreterFrame *prev_frame = frame->previous;
- _PyEvalFrameClearAndPop(tstate, frame);
- return prev_frame;
-}
-
int _Py_CheckRecursiveCallPy(
PyThreadState *tstate)
@@ -1297,7 +1289,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
}
opcode = _PyOpcode_Deopt[opcode];
if (_PyOpcode_Caches[opcode]) {
- _Py_CODEUNIT *counter = &next_instr[1];
+ uint16_t *counter = &next_instr[1].cache;
// The instruction is going to decrement the counter, so we need to
// increment it here to make sure it doesn't try to specialize:
if (!ADAPTIVE_COUNTER_IS_MAX(*counter)) {
@@ -1432,7 +1424,10 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
assert(_PyErr_Occurred(tstate));
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
- frame = cframe.current_frame = pop_frame(tstate, frame);
+ // GH-99729: We need to unlink the frame *before* clearing it:
+ _PyInterpreterFrame *dying = frame;
+ frame = cframe.current_frame = dying->previous;
+ _PyEvalFrameClearAndPop(tstate, dying);
if (frame == &entry_frame) {
/* Restore previous cframe and exit */
tstate->cframe = cframe.previous;
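
The GH-99729 comments above capture the ordering rule: the dying frame must be unlinked from the frame chain before _PyEvalFrameClearAndPop() runs, because clearing can execute arbitrary Python code (destructors, tracing) that walks the chain. A self-contained sketch of the same unlink-before-destroy rule on a plain linked list (names are illustrative):

#include <stdlib.h>

struct node {
    struct node *previous;
    void (*on_clear)(struct node *);   /* may run re-entrant code */
};

static struct node *
pop_node(struct node **top)
{
    struct node *dying = *top;
    *top = dying->previous;            /* 1. unlink: the chain stays consistent */
    if (dying->on_clear != NULL) {
        dying->on_clear(dying);        /* 2. only then run teardown */
    }
    free(dying);
    return *top;
}
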
diff --git a/Python/clinic/bltinmodule.c.h b/Python/clinic/bltinmodule.c.h
index 19930a519be089..89f069dd97f6ea 100644
--- a/Python/clinic/bltinmodule.c.h
+++ b/Python/clinic/bltinmodule.c.h
@@ -354,8 +354,8 @@ builtin_compile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObj
}
}
if (args[4]) {
- dont_inherit = _PyLong_AsInt(args[4]);
- if (dont_inherit == -1 && PyErr_Occurred()) {
+ dont_inherit = PyObject_IsTrue(args[4]);
+ if (dont_inherit < 0) {
goto exit;
}
if (!--noptargs) {
@@ -1215,4 +1215,4 @@ builtin_issubclass(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=3c9497e0ffeb8a30 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=973da43fa65aa727 input=a9049054013a1b77]*/
diff --git a/Python/compile.c b/Python/compile.c
index e200c5abb59853..09eb4016940d80 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -55,6 +55,16 @@
*/
#define STACK_USE_GUIDELINE 30
+#undef SUCCESS
+#undef ERROR
+#define SUCCESS 0
+#define ERROR -1
+
+#define RETURN_IF_ERROR(X) \
+ if ((X) == -1) { \
+ return ERROR; \
+ }
+
/* If we exceed this limit, it should
* be considered a compiler bug.
* Currently it should be impossible
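
The new SUCCESS/ERROR constants and RETURN_IF_ERROR macro support converting compile.c helpers to the usual 0/-1 return convention. A tiny usage sketch with made-up helpers, each returning SUCCESS or ERROR:

static int do_step_one(void) { return SUCCESS; }   /* hypothetical */
static int do_step_two(void) { return SUCCESS; }   /* hypothetical */

static int
run_steps(void)
{
    RETURN_IF_ERROR(do_step_one());
    RETURN_IF_ERROR(do_step_two());
    return SUCCESS;
}
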
@@ -253,22 +263,32 @@ write_instr(_Py_CODEUNIT *codestr, struct instr *instruction, int ilen)
int caches = _PyOpcode_Caches[opcode];
switch (ilen - caches) {
case 4:
- *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG, (oparg >> 24) & 0xFF);
+ codestr->opcode = EXTENDED_ARG;
+ codestr->oparg = (oparg >> 24) & 0xFF;
+ codestr++;
/* fall through */
case 3:
- *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG, (oparg >> 16) & 0xFF);
+ codestr->opcode = EXTENDED_ARG;
+ codestr->oparg = (oparg >> 16) & 0xFF;
+ codestr++;
/* fall through */
case 2:
- *codestr++ = _Py_MAKECODEUNIT(EXTENDED_ARG, (oparg >> 8) & 0xFF);
+ codestr->opcode = EXTENDED_ARG;
+ codestr->oparg = (oparg >> 8) & 0xFF;
+ codestr++;
/* fall through */
case 1:
- *codestr++ = _Py_MAKECODEUNIT(opcode, oparg & 0xFF);
+ codestr->opcode = opcode;
+ codestr->oparg = oparg & 0xFF;
+ codestr++;
break;
default:
Py_UNREACHABLE();
}
while (caches--) {
- *codestr++ = _Py_MAKECODEUNIT(CACHE, 0);
+ codestr->opcode = CACHE;
+ codestr->oparg = 0;
+ codestr++;
}
}
@@ -498,7 +518,7 @@ static int compiler_annassign(struct compiler *, stmt_ty);
static int compiler_subscript(struct compiler *, expr_ty);
static int compiler_slice(struct compiler *, expr_ty);
-static int are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t);
+static bool are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t);
static int compiler_with(struct compiler *, stmt_ty, int);
@@ -610,18 +630,18 @@ compiler_setup(struct compiler *c, mod_ty mod, PyObject *filename,
{
c->c_const_cache = PyDict_New();
if (!c->c_const_cache) {
- return 0;
+ return ERROR;
}
c->c_stack = PyList_New(0);
if (!c->c_stack) {
- return 0;
+ return ERROR;
}
c->c_filename = Py_NewRef(filename);
c->c_arena = arena;
if (!_PyFuture_FromAST(mod, filename, &c->c_future)) {
- return 0;
+ return ERROR;
}
int merged = c->c_future.ff_features | flags.cf_flags;
c->c_future.ff_features = merged;
@@ -635,16 +655,16 @@ compiler_setup(struct compiler *c, mod_ty mod, PyObject *filename,
state.ff_features = merged;
if (!_PyAST_Optimize(mod, arena, &state)) {
- return 0;
+ return ERROR;
}
c->c_st = _PySymtable_Build(mod, filename, &c->c_future);
if (c->c_st == NULL) {
if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_SystemError, "no symtable");
}
- return 0;
+ return ERROR;
}
- return 1;
+ return SUCCESS;
}
static struct compiler*
@@ -656,7 +676,7 @@ new_compiler(mod_ty mod, PyObject *filename, PyCompilerFlags *pflags,
if (c == NULL) {
return NULL;
}
- if (!compiler_setup(c, mod, filename, flags, optimize, arena)) {
+ if (compiler_setup(c, mod, filename, flags, optimize, arena) < 0) {
compiler_free(c);
return NULL;
}
@@ -800,11 +820,12 @@ cfg_builder_init(cfg_builder *g)
{
g->g_block_list = NULL;
basicblock *block = cfg_builder_new_block(g);
- if (block == NULL)
- return 0;
+ if (block == NULL) {
+ return ERROR;
+ }
g->g_curblock = g->g_entryblock = block;
g->g_current_label = NO_LABEL;
- return 1;
+ return SUCCESS;
}
static void
@@ -862,8 +883,10 @@ compiler_set_qualname(struct compiler *c)
|| u->u_scope_type == COMPILER_SCOPE_CLASS) {
assert(u->u_name);
mangled = _Py_Mangle(parent->u_private, u->u_name);
- if (!mangled)
- return 0;
+ if (!mangled) {
+ return ERROR;
+ }
+
scope = _PyST_GetScope(parent->u_ste, mangled);
Py_DECREF(mangled);
assert(scope != GLOBAL_IMPLICIT);
@@ -879,8 +902,9 @@ compiler_set_qualname(struct compiler *c)
_Py_DECLARE_STR(dot_locals, ".");
base = PyUnicode_Concat(parent->u_qualname,
&_Py_STR(dot_locals));
- if (base == NULL)
- return 0;
+ if (base == NULL) {
+ return ERROR;
+ }
}
else {
base = Py_NewRef(parent->u_qualname);
@@ -892,18 +916,20 @@ compiler_set_qualname(struct compiler *c)
_Py_DECLARE_STR(dot, ".");
name = PyUnicode_Concat(base, &_Py_STR(dot));
Py_DECREF(base);
- if (name == NULL)
- return 0;
+ if (name == NULL) {
+ return ERROR;
+ }
PyUnicode_Append(&name, u->u_name);
- if (name == NULL)
- return 0;
+ if (name == NULL) {
+ return ERROR;
+ }
}
else {
name = Py_NewRef(u->u_name);
}
u->u_qualname = name;
- return 1;
+ return SUCCESS;
}
static jump_target_label
@@ -1304,10 +1330,6 @@ PyCompile_OpcodeStackEffect(int opcode, int oparg)
return stack_effect(opcode, oparg, -1);
}
-/* Add an opcode with no argument.
- Returns 0 on failure, 1 on success.
-*/
-
static int
basicblock_addop(basicblock *b, int opcode, int oparg, location loc)
{
@@ -1318,7 +1340,7 @@ basicblock_addop(basicblock *b, int opcode, int oparg, location loc)
int off = basicblock_next_instr(b);
if (off < 0) {
- return 0;
+ return ERROR;
}
struct instr *i = &b->b_instr[off];
i->i_opcode = opcode;
@@ -1326,7 +1348,7 @@ basicblock_addop(basicblock *b, int opcode, int oparg, location loc)
i->i_target = NULL;
i->i_loc = loc;
- return 1;
+ return SUCCESS;
}
static bool
@@ -1522,8 +1544,9 @@ static int
compiler_addop_load_const(struct compiler *c, location loc, PyObject *o)
{
Py_ssize_t arg = compiler_add_const(c, o);
- if (arg < 0)
- return 0;
+ if (arg < 0) {
+ return ERROR;
+ }
return cfg_builder_addop_i(CFG_BUILDER(c), LOAD_CONST, arg, loc);
}
@@ -1532,8 +1555,9 @@ compiler_addop_o(struct compiler *c, location loc,
int opcode, PyObject *dict, PyObject *o)
{
Py_ssize_t arg = dict_add_o(dict, o);
- if (arg < 0)
- return 0;
+ if (arg < 0) {
+ return ERROR;
+ }
return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, loc);
}
@@ -1544,12 +1568,14 @@ compiler_addop_name(struct compiler *c, location loc,
Py_ssize_t arg;
PyObject *mangled = _Py_Mangle(c->u->u_private, o);
- if (!mangled)
- return 0;
+ if (!mangled) {
+ return ERROR;
+ }
arg = dict_add_o(dict, mangled);
Py_DECREF(mangled);
- if (arg < 0)
- return 0;
+ if (arg < 0) {
+ return ERROR;
+ }
if (opcode == LOAD_ATTR) {
arg <<= 1;
}
@@ -1561,9 +1587,7 @@ compiler_addop_name(struct compiler *c, location loc,
return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, loc);
}
-/* Add an opcode with an integer argument.
- Returns 0 on failure, 1 on success.
-*/
+/* Add an opcode with an integer argument */
static int
cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, location loc)
{
@@ -1588,94 +1612,79 @@ cfg_builder_addop_j(cfg_builder *g, location loc,
return cfg_builder_addop(g, opcode, target.id, loc);
}
-
-#define ADDOP(C, LOC, OP) { \
- if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) \
- return 0; \
-}
+#define ADDOP(C, LOC, OP) \
+ RETURN_IF_ERROR(cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC)))
#define ADDOP_IN_SCOPE(C, LOC, OP) { \
- if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) { \
+ if (cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC)) < 0) { \
compiler_exit_scope(c); \
- return 0; \
+        return ERROR; \
} \
}
-#define ADDOP_LOAD_CONST(C, LOC, O) { \
- if (!compiler_addop_load_const((C), (LOC), (O))) \
- return 0; \
-}
+#define ADDOP_LOAD_CONST(C, LOC, O) \
+ RETURN_IF_ERROR(compiler_addop_load_const((C), (LOC), (O)))
/* Same as ADDOP_LOAD_CONST, but steals a reference. */
#define ADDOP_LOAD_CONST_NEW(C, LOC, O) { \
PyObject *__new_const = (O); \
if (__new_const == NULL) { \
- return 0; \
+ return ERROR; \
} \
- if (!compiler_addop_load_const((C), (LOC), __new_const)) { \
+ if (compiler_addop_load_const((C), (LOC), __new_const) < 0) { \
Py_DECREF(__new_const); \
- return 0; \
+ return ERROR; \
} \
Py_DECREF(__new_const); \
}
#define ADDOP_N(C, LOC, OP, O, TYPE) { \
assert(!HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \
- if (!compiler_addop_o((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) { \
+ if (compiler_addop_o((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O)) < 0) { \
Py_DECREF((O)); \
- return 0; \
+ return ERROR; \
} \
Py_DECREF((O)); \
}
-#define ADDOP_NAME(C, LOC, OP, O, TYPE) { \
- if (!compiler_addop_name((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) \
- return 0; \
-}
+#define ADDOP_NAME(C, LOC, OP, O, TYPE) \
+ RETURN_IF_ERROR(compiler_addop_name((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O)))
-#define ADDOP_I(C, LOC, OP, O) { \
- if (!cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), (LOC))) \
- return 0; \
-}
+#define ADDOP_I(C, LOC, OP, O) \
+ RETURN_IF_ERROR(cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), (LOC)))
-#define ADDOP_JUMP(C, LOC, OP, O) { \
- if (!cfg_builder_addop_j(CFG_BUILDER(C), (LOC), (OP), (O))) \
- return 0; \
-}
+#define ADDOP_JUMP(C, LOC, OP, O) \
+ RETURN_IF_ERROR(cfg_builder_addop_j(CFG_BUILDER(C), (LOC), (OP), (O)))
-#define ADDOP_COMPARE(C, LOC, CMP) { \
- if (!compiler_addcompare((C), (LOC), (cmpop_ty)(CMP))) \
- return 0; \
-}
+#define ADDOP_COMPARE(C, LOC, CMP) \
+ RETURN_IF_ERROR(compiler_addcompare((C), (LOC), (cmpop_ty)(CMP)))
#define ADDOP_BINARY(C, LOC, BINOP) \
- RETURN_IF_FALSE(addop_binary((C), (LOC), (BINOP), false))
+ RETURN_IF_ERROR(addop_binary((C), (LOC), (BINOP), false))
#define ADDOP_INPLACE(C, LOC, BINOP) \
- RETURN_IF_FALSE(addop_binary((C), (LOC), (BINOP), true))
+ RETURN_IF_ERROR(addop_binary((C), (LOC), (BINOP), true))
#define ADD_YIELD_FROM(C, LOC, await) \
- RETURN_IF_FALSE(compiler_add_yield_from((C), (LOC), (await)))
+ RETURN_IF_ERROR(compiler_add_yield_from((C), (LOC), (await)))
#define POP_EXCEPT_AND_RERAISE(C, LOC) \
- RETURN_IF_FALSE(compiler_pop_except_and_reraise((C), (LOC)))
+ RETURN_IF_ERROR(compiler_pop_except_and_reraise((C), (LOC)))
#define ADDOP_YIELD(C, LOC) \
- RETURN_IF_FALSE(addop_yield((C), (LOC)))
+ RETURN_IF_ERROR(addop_yield((C), (LOC)))
/* VISIT and VISIT_SEQ takes an ASDL type as their second argument. They use
the ASDL name to synthesize the name of the C type and the visit function.
*/
-#define VISIT(C, TYPE, V) {\
- if (!compiler_visit_ ## TYPE((C), (V))) \
- return 0; \
-}
+#define VISIT(C, TYPE, V) \
+    RETURN_IF_ERROR(compiler_visit_ ## TYPE((C), (V)))
#define VISIT_IN_SCOPE(C, TYPE, V) {\
- if (!compiler_visit_ ## TYPE((C), (V))) { \
+ if (compiler_visit_ ## TYPE((C), (V)) < 0) { \
compiler_exit_scope(c); \
- return 0; \
+ return ERROR; \
} \
}
@@ -1684,8 +1693,8 @@ cfg_builder_addop_j(cfg_builder *g, location loc,
asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \
for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
- if (!compiler_visit_ ## TYPE((C), elt)) \
- return 0; \
+ if (compiler_visit_ ## TYPE((C), elt) < 0) \
+ return ERROR; \
} \
}
@@ -1694,17 +1703,13 @@ cfg_builder_addop_j(cfg_builder *g, location loc,
asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \
for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
- if (!compiler_visit_ ## TYPE((C), elt)) { \
+ if (compiler_visit_ ## TYPE((C), elt) < 0) { \
compiler_exit_scope(c); \
- return 0; \
+ return ERROR; \
} \
} \
}
-#define RETURN_IF_FALSE(X) \
- if (!(X)) { \
- return 0; \
- }
static int
compiler_enter_scope(struct compiler *c, identifier name,
@@ -1718,7 +1723,7 @@ compiler_enter_scope(struct compiler *c, identifier name,
struct compiler_unit));
if (!u) {
PyErr_NoMemory();
- return 0;
+ return ERROR;
}
u->u_scope_type = scope_type;
u->u_argcount = 0;
@@ -1727,14 +1732,14 @@ compiler_enter_scope(struct compiler *c, identifier name,
u->u_ste = PySymtable_Lookup(c->c_st, key);
if (!u->u_ste) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
u->u_name = Py_NewRef(name);
u->u_varnames = list2dict(u->u_ste->ste_varnames);
u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
if (!u->u_varnames || !u->u_cellvars) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
if (u->u_ste->ste_needs_class_closure) {
/* Cook up an implicit __class__ cell. */
@@ -1745,7 +1750,7 @@ compiler_enter_scope(struct compiler *c, identifier name,
_PyLong_GetZero());
if (res < 0) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
}
@@ -1753,7 +1758,7 @@ compiler_enter_scope(struct compiler *c, identifier name,
PyDict_GET_SIZE(u->u_cellvars));
if (!u->u_freevars) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
u->u_nfblocks = 0;
@@ -1761,12 +1766,12 @@ compiler_enter_scope(struct compiler *c, identifier name,
u->u_consts = PyDict_New();
if (!u->u_consts) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
u->u_names = PyDict_New();
if (!u->u_names) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
u->u_private = NULL;
@@ -1777,7 +1782,7 @@ compiler_enter_scope(struct compiler *c, identifier name,
if (!capsule || PyList_Append(c->c_stack, capsule) < 0) {
Py_XDECREF(capsule);
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
Py_DECREF(capsule);
u->u_private = Py_XNewRef(c->u->u_private);
@@ -1787,23 +1792,20 @@ compiler_enter_scope(struct compiler *c, identifier name,
c->c_nestlevel++;
cfg_builder *g = CFG_BUILDER(c);
- if (!cfg_builder_init(g)) {
- return 0;
- }
+ RETURN_IF_ERROR(cfg_builder_init(g));
if (u->u_scope_type == COMPILER_SCOPE_MODULE) {
loc.lineno = 0;
}
else {
- if (!compiler_set_qualname(c))
- return 0;
+ RETURN_IF_ERROR(compiler_set_qualname(c));
}
ADDOP_I(c, loc, RESUME, 0);
if (u->u_scope_type == COMPILER_SCOPE_MODULE) {
loc.lineno = -1;
}
- return 1;
+ return SUCCESS;
}
static void
@@ -1837,7 +1839,7 @@ compiler_exit_scope(struct compiler *c)
/* Search if variable annotations are present statically in a block. */
-static int
+static bool
find_ann(asdl_stmt_seq *stmts)
{
int i, j, res = 0;
@@ -1847,7 +1849,7 @@ find_ann(asdl_stmt_seq *stmts)
st = (stmt_ty)asdl_seq_GET(stmts, i);
switch (st->kind) {
case AnnAssign_kind:
- return 1;
+ return true;
case For_kind:
res = find_ann(st->v.For.body) ||
find_ann(st->v.For.orelse);
@@ -1875,7 +1877,7 @@ find_ann(asdl_stmt_seq *stmts)
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
st->v.Try.handlers, j);
if (find_ann(handler->v.ExceptHandler.body)) {
- return 1;
+ return true;
}
}
res = find_ann(st->v.Try.body) ||
@@ -1887,7 +1889,7 @@ find_ann(asdl_stmt_seq *stmts)
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
st->v.TryStar.handlers, j);
if (find_ann(handler->v.ExceptHandler.body)) {
- return 1;
+ return true;
}
}
res = find_ann(st->v.TryStar.body) ||
@@ -1895,7 +1897,7 @@ find_ann(asdl_stmt_seq *stmts)
find_ann(st->v.TryStar.orelse);
break;
default:
- res = 0;
+ res = false;
}
if (res) {
break;
@@ -1922,7 +1924,7 @@ compiler_push_fblock(struct compiler *c, location loc,
f->fb_block = block_label;
f->fb_exit = exit;
f->fb_datum = datum;
- return 1;
+ return SUCCESS;
}
static void
@@ -1942,7 +1944,7 @@ compiler_call_exit_with_nones(struct compiler *c, location loc)
ADDOP_LOAD_CONST(c, loc, Py_None);
ADDOP_LOAD_CONST(c, loc, Py_None);
ADDOP_I(c, loc, CALL, 2);
- return 1;
+ return SUCCESS;
}
static int
@@ -1966,7 +1968,7 @@ compiler_add_yield_from(struct compiler *c, location loc, int await)
ADDOP(c, loc, CLEANUP_THROW);
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
static int
@@ -1982,7 +1984,7 @@ compiler_pop_except_and_reraise(struct compiler *c, location loc)
ADDOP_I(c, loc, COPY, 3);
ADDOP(c, loc, POP_EXCEPT);
ADDOP_I(c, loc, RERAISE, 1);
- return 1;
+ return SUCCESS;
}
/* Unwind a frame block. If preserve_tos is true, the TOS before
@@ -1999,7 +2001,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
case EXCEPTION_HANDLER:
case EXCEPTION_GROUP_HANDLER:
case ASYNC_COMPREHENSION_GENERATOR:
- return 1;
+ return SUCCESS;
case FOR_LOOP:
/* Pop the iterator */
@@ -2007,19 +2009,18 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
ADDOP_I(c, *ploc, SWAP, 2);
}
ADDOP(c, *ploc, POP_TOP);
- return 1;
+ return SUCCESS;
case TRY_EXCEPT:
ADDOP(c, *ploc, POP_BLOCK);
- return 1;
+ return SUCCESS;
case FINALLY_TRY:
/* This POP_BLOCK gets the line number of the unwinding statement */
ADDOP(c, *ploc, POP_BLOCK);
if (preserve_tos) {
- if (!compiler_push_fblock(c, *ploc, POP_VALUE, NO_LABEL, NO_LABEL, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, *ploc, POP_VALUE, NO_LABEL, NO_LABEL, NULL));
}
/* Emit the finally block */
VISIT_SEQ(c, stmt, info->fb_datum);
@@ -2030,7 +2031,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
* statement causing the unwinding, so make the unwinding
* instruction artificial */
*ploc = NO_LOCATION;
- return 1;
+ return SUCCESS;
case FINALLY_END:
if (preserve_tos) {
@@ -2042,7 +2043,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
}
ADDOP(c, *ploc, POP_BLOCK);
ADDOP(c, *ploc, POP_EXCEPT);
- return 1;
+ return SUCCESS;
case WITH:
case ASYNC_WITH:
@@ -2051,9 +2052,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
if (preserve_tos) {
ADDOP_I(c, *ploc, SWAP, 2);
}
- if(!compiler_call_exit_with_nones(c, *ploc)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_call_exit_with_nones(c, *ploc));
if (info->fb_type == ASYNC_WITH) {
ADDOP_I(c, *ploc, GET_AWAITABLE, 2);
ADDOP_LOAD_CONST(c, *ploc, Py_None);
@@ -2064,7 +2063,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
* statement causing the unwinding, so make the unwinding
* instruction artificial */
*ploc = NO_LOCATION;
- return 1;
+ return SUCCESS;
case HANDLER_CLEANUP: {
if (info->fb_datum) {
@@ -2077,17 +2076,17 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
ADDOP(c, *ploc, POP_EXCEPT);
if (info->fb_datum) {
ADDOP_LOAD_CONST(c, *ploc, Py_None);
- compiler_nameop(c, *ploc, info->fb_datum, Store);
- compiler_nameop(c, *ploc, info->fb_datum, Del);
+ RETURN_IF_ERROR(compiler_nameop(c, *ploc, info->fb_datum, Store));
+ RETURN_IF_ERROR(compiler_nameop(c, *ploc, info->fb_datum, Del));
}
- return 1;
+ return SUCCESS;
}
case POP_VALUE: {
if (preserve_tos) {
ADDOP_I(c, *ploc, SWAP, 2);
}
ADDOP(c, *ploc, POP_TOP);
- return 1;
+ return SUCCESS;
}
}
Py_UNREACHABLE();
@@ -2099,7 +2098,7 @@ compiler_unwind_fblock_stack(struct compiler *c, location *ploc,
int preserve_tos, struct fblockinfo **loop)
{
if (c->u->u_nfblocks == 0) {
- return 1;
+ return SUCCESS;
}
struct fblockinfo *top = &c->u->u_fblock[c->u->u_nfblocks-1];
if (top->fb_type == EXCEPTION_GROUP_HANDLER) {
@@ -2108,19 +2107,15 @@ compiler_unwind_fblock_stack(struct compiler *c, location *ploc,
}
if (loop != NULL && (top->fb_type == WHILE_LOOP || top->fb_type == FOR_LOOP)) {
*loop = top;
- return 1;
+ return SUCCESS;
}
struct fblockinfo copy = *top;
c->u->u_nfblocks--;
-    if (!compiler_unwind_fblock(c, ploc, &copy, preserve_tos)) {
- return 0;
- }
- if (!compiler_unwind_fblock_stack(c, ploc, preserve_tos, loop)) {
- return 0;
- }
+    RETURN_IF_ERROR(compiler_unwind_fblock(c, ploc, &copy, preserve_tos));
+ RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, ploc, preserve_tos, loop));
c->u->u_fblock[c->u->u_nfblocks] = copy;
c->u->u_nfblocks++;
- return 1;
+ return SUCCESS;
}
/* Compile a sequence of statements, checking for a docstring
@@ -2145,8 +2140,9 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts)
if (find_ann(stmts)) {
ADDOP(c, loc, SETUP_ANNOTATIONS);
}
- if (!asdl_seq_LEN(stmts))
- return 1;
+ if (!asdl_seq_LEN(stmts)) {
+ return SUCCESS;
+ }
/* if not -OO mode, set docstring */
if (c->c_optimize < 2) {
docstring = _PyAST_GetDocString(stmts);
@@ -2155,29 +2151,29 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts)
st = (stmt_ty)asdl_seq_GET(stmts, 0);
assert(st->kind == Expr_kind);
VISIT(c, expr, st->v.Expr.value);
- if (!compiler_nameop(c, NO_LOCATION, &_Py_ID(__doc__), Store))
- return 0;
+ RETURN_IF_ERROR(compiler_nameop(c, NO_LOCATION, &_Py_ID(__doc__), Store));
}
}
- for (; i < asdl_seq_LEN(stmts); i++)
+ for (; i < asdl_seq_LEN(stmts); i++) {
VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i));
- return 1;
+ }
+ return SUCCESS;
}
static int
compiler_codegen(struct compiler *c, mod_ty mod)
{
_Py_DECLARE_STR(anon_module, "");
- if (!compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE,
- mod, 1)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE,
+ mod, 1));
+
location loc = LOCATION(1, 1, 0, 0);
switch (mod->kind) {
case Module_kind:
- if (!compiler_body(c, loc, mod->v.Module.body)) {
+ if (compiler_body(c, loc, mod->v.Module.body) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
break;
case Interactive_kind:
@@ -2194,16 +2190,16 @@ compiler_codegen(struct compiler *c, mod_ty mod)
PyErr_Format(PyExc_SystemError,
"module kind %d should not be possible",
mod->kind);
- return 0;
+ return ERROR;
}
- return 1;
+ return SUCCESS;
}
static PyCodeObject *
compiler_mod(struct compiler *c, mod_ty mod)
{
int addNone = mod->kind != Expression_kind;
- if (!compiler_codegen(c, mod)) {
+ if (compiler_codegen(c, mod) < 0) {
return NULL;
}
PyCodeObject *co = assemble(c, addNone);
@@ -2270,7 +2266,7 @@ compiler_make_closure(struct compiler *c, location loc,
*/
int reftype = get_ref_type(c, name);
if (reftype == -1) {
- return 0;
+ return ERROR;
}
int arg;
if (reftype == CELL) {
@@ -2293,7 +2289,7 @@ compiler_make_closure(struct compiler *c, location loc,
co->co_name,
freevars);
Py_DECREF(freevars);
- return 0;
+ return ERROR;
}
ADDOP_I(c, loc, LOAD_CLOSURE, arg);
}
@@ -2302,34 +2298,34 @@ compiler_make_closure(struct compiler *c, location loc,
}
ADDOP_LOAD_CONST(c, loc, (PyObject*)co);
ADDOP_I(c, loc, MAKE_FUNCTION, flags);
- return 1;
+ return SUCCESS;
}
static int
compiler_decorators(struct compiler *c, asdl_expr_seq* decos)
{
- int i;
-
- if (!decos)
- return 1;
+ if (!decos) {
+ return SUCCESS;
+ }
- for (i = 0; i < asdl_seq_LEN(decos); i++) {
+ for (Py_ssize_t i = 0; i < asdl_seq_LEN(decos); i++) {
VISIT(c, expr, (expr_ty)asdl_seq_GET(decos, i));
}
- return 1;
+ return SUCCESS;
}
static int
compiler_apply_decorators(struct compiler *c, asdl_expr_seq* decos)
{
- if (!decos)
- return 1;
+ if (!decos) {
+ return SUCCESS;
+ }
for (Py_ssize_t i = asdl_seq_LEN(decos) - 1; i > -1; i--) {
location loc = LOC((expr_ty)asdl_seq_GET(decos, i));
ADDOP_I(c, loc, CALL, 0);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -2338,7 +2334,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc,
{
/* Push a dict of keyword-only default values.
- Return 0 on error, -1 if no dict pushed, 1 if a dict is pushed.
+ Return -1 on error, 0 if no dict pushed, 1 if a dict is pushed.
*/
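+    /* Sketch of how a caller distinguishes the three results (this mirrors
+     * the call site in compiler_default_arguments further down):
+     *
+     *     int res = compiler_visit_kwonlydefaults(c, loc, kwonlyargs, kw_defaults);
+     *     RETURN_IF_ERROR(res);   // -1: propagate the error
+     *     if (res > 0) {          //  1: a defaults dict was pushed
+     *         funcflags |= 0x02;
+     *     }                       //  0: nothing was pushed
+     */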
int i;
PyObject *keys = NULL;
@@ -2355,7 +2351,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc,
keys = PyList_New(1);
if (keys == NULL) {
Py_DECREF(mangled);
- return 0;
+ return ERROR;
}
PyList_SET_ITEM(keys, 0, mangled);
}
@@ -2366,7 +2362,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc,
goto error;
}
}
- if (!compiler_visit_expr(c, default_)) {
+ if (compiler_visit_expr(c, default_) < 0) {
goto error;
}
}
@@ -2381,12 +2377,12 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc,
return 1;
}
else {
- return -1;
+ return 0;
}
error:
Py_XDECREF(keys);
- return 0;
+ return ERROR;
}
static int
@@ -2394,7 +2390,7 @@ compiler_visit_annexpr(struct compiler *c, expr_ty annotation)
{
location loc = LOC(annotation);
ADDOP_LOAD_CONST_NEW(c, loc, _PyAST_ExprAsUnicode(annotation));
- return 1;
+ return SUCCESS;
}
static int
@@ -2402,11 +2398,11 @@ compiler_visit_argannotation(struct compiler *c, identifier id,
expr_ty annotation, Py_ssize_t *annotations_len, location loc)
{
if (!annotation) {
- return 1;
+ return SUCCESS;
}
PyObject *mangled = _Py_Mangle(c->u->u_private, id);
if (!mangled) {
- return 0;
+ return ERROR;
}
ADDOP_LOAD_CONST(c, loc, mangled);
Py_DECREF(mangled);
@@ -2428,7 +2424,7 @@ compiler_visit_argannotation(struct compiler *c, identifier id,
}
}
*annotations_len += 2;
- return 1;
+ return SUCCESS;
}
static int
@@ -2438,15 +2434,15 @@ compiler_visit_argannotations(struct compiler *c, asdl_arg_seq* args,
int i;
for (i = 0; i < asdl_seq_LEN(args); i++) {
arg_ty arg = (arg_ty)asdl_seq_GET(args, i);
- if (!compiler_visit_argannotation(
+ RETURN_IF_ERROR(
+ compiler_visit_argannotation(
c,
arg->arg,
arg->annotation,
annotations_len,
- loc))
- return 0;
+ loc));
}
- return 1;
+ return SUCCESS;
}
static int
@@ -2456,36 +2452,40 @@ compiler_visit_annotations(struct compiler *c, location loc,
/* Push arg annotation names and values.
The expressions are evaluated out-of-order wrt the source code.
- Return 0 on error, -1 if no annotations pushed, 1 if a annotations is pushed.
+       Return -1 on error, 0 if no annotations pushed, 1 if annotations are pushed.
*/
Py_ssize_t annotations_len = 0;
- if (!compiler_visit_argannotations(c, args->args, &annotations_len, loc))
- return 0;
- if (!compiler_visit_argannotations(c, args->posonlyargs, &annotations_len, loc))
- return 0;
- if (args->vararg && args->vararg->annotation &&
- !compiler_visit_argannotation(c, args->vararg->arg,
- args->vararg->annotation, &annotations_len, loc))
- return 0;
- if (!compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len, loc))
- return 0;
- if (args->kwarg && args->kwarg->annotation &&
- !compiler_visit_argannotation(c, args->kwarg->arg,
- args->kwarg->annotation, &annotations_len, loc))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_visit_argannotations(c, args->args, &annotations_len, loc));
- if (!compiler_visit_argannotation(c, &_Py_ID(return), returns,
- &annotations_len, loc)) {
- return 0;
+ RETURN_IF_ERROR(
+ compiler_visit_argannotations(c, args->posonlyargs, &annotations_len, loc));
+
+ if (args->vararg && args->vararg->annotation) {
+ RETURN_IF_ERROR(
+ compiler_visit_argannotation(c, args->vararg->arg,
+ args->vararg->annotation, &annotations_len, loc));
}
+ RETURN_IF_ERROR(
+ compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len, loc));
+
+ if (args->kwarg && args->kwarg->annotation) {
+ RETURN_IF_ERROR(
+ compiler_visit_argannotation(c, args->kwarg->arg,
+ args->kwarg->annotation, &annotations_len, loc));
+ }
+
+ RETURN_IF_ERROR(
+ compiler_visit_argannotation(c, &_Py_ID(return), returns, &annotations_len, loc));
+
if (annotations_len) {
ADDOP_I(c, loc, BUILD_TUPLE, annotations_len);
return 1;
}
- return -1;
+ return 0;
}
static int
@@ -2494,7 +2494,7 @@ compiler_visit_defaults(struct compiler *c, arguments_ty args,
{
VISIT_SEQ(c, expr, args->defaults);
ADDOP_I(c, loc, BUILD_TUPLE, asdl_seq_LEN(args->defaults));
- return 1;
+ return SUCCESS;
}
static Py_ssize_t
@@ -2503,47 +2503,45 @@ compiler_default_arguments(struct compiler *c, location loc,
{
Py_ssize_t funcflags = 0;
if (args->defaults && asdl_seq_LEN(args->defaults) > 0) {
- if (!compiler_visit_defaults(c, args, loc))
- return -1;
+ RETURN_IF_ERROR(compiler_visit_defaults(c, args, loc));
funcflags |= 0x01;
}
if (args->kwonlyargs) {
int res = compiler_visit_kwonlydefaults(c, loc,
args->kwonlyargs,
args->kw_defaults);
- if (res == 0) {
- return -1;
- }
- else if (res > 0) {
+ RETURN_IF_ERROR(res);
+ if (res > 0) {
funcflags |= 0x02;
}
}
return funcflags;
}
-static int
+static bool
forbidden_name(struct compiler *c, location loc, identifier name,
expr_context_ty ctx)
{
if (ctx == Store && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
compiler_error(c, loc, "cannot assign to __debug__");
- return 1;
+ return true;
}
if (ctx == Del && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
compiler_error(c, loc, "cannot delete __debug__");
- return 1;
+ return true;
}
- return 0;
+ return false;
}
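+/* forbidden_name() reports the error itself via compiler_error(), so callers
+ * only translate a true result into ERROR; a sketch of the pattern used by
+ * compiler_check_debug_one_arg and compiler_nameop:
+ *
+ *     if (forbidden_name(c, loc, name, ctx)) {
+ *         return ERROR;
+ *     }
+ */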
static int
compiler_check_debug_one_arg(struct compiler *c, arg_ty arg)
{
if (arg != NULL) {
- if (forbidden_name(c, LOC(arg), arg->arg, Store))
- return 0;
+ if (forbidden_name(c, LOC(arg), arg->arg, Store)) {
+ return ERROR;
+ }
}
- return 1;
+ return SUCCESS;
}
static int
@@ -2551,39 +2549,32 @@ compiler_check_debug_args_seq(struct compiler *c, asdl_arg_seq *args)
{
if (args != NULL) {
for (Py_ssize_t i = 0, n = asdl_seq_LEN(args); i < n; i++) {
- if (!compiler_check_debug_one_arg(c, asdl_seq_GET(args, i)))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_check_debug_one_arg(c, asdl_seq_GET(args, i)));
}
}
- return 1;
+ return SUCCESS;
}
static int
compiler_check_debug_args(struct compiler *c, arguments_ty args)
{
- if (!compiler_check_debug_args_seq(c, args->posonlyargs))
- return 0;
- if (!compiler_check_debug_args_seq(c, args->args))
- return 0;
- if (!compiler_check_debug_one_arg(c, args->vararg))
- return 0;
- if (!compiler_check_debug_args_seq(c, args->kwonlyargs))
- return 0;
- if (!compiler_check_debug_one_arg(c, args->kwarg))
- return 0;
- return 1;
+ RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->posonlyargs));
+ RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->args));
+ RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->vararg));
+ RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->kwonlyargs));
+ RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->kwarg));
+ return SUCCESS;
}
static inline int
insert_instruction(basicblock *block, int pos, struct instr *instr) {
- if (basicblock_next_instr(block) < 0) {
- return -1;
- }
+ RETURN_IF_ERROR(basicblock_next_instr(block));
for (int i = block->b_iused - 1; i > pos; i--) {
block->b_instr[i] = block->b_instr[i-1];
}
block->b_instr[pos] = *instr;
- return 0;
+ return SUCCESS;
}
static int
@@ -2598,16 +2589,15 @@ wrap_in_stopiteration_handler(struct compiler *c)
.i_loc = NO_LOCATION,
.i_target = NULL,
};
- if (insert_instruction(c->u->u_cfg_builder.g_entryblock, 0, &setup)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ insert_instruction(c->u->u_cfg_builder.g_entryblock, 0, &setup));
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
ADDOP(c, NO_LOCATION, RETURN_VALUE);
USE_LABEL(c, handler);
ADDOP(c, NO_LOCATION, STOPITERATION_ERROR);
ADDOP_I(c, NO_LOCATION, RERAISE, 1);
- return 1;
+ return SUCCESS;
}
static int
@@ -2647,11 +2637,8 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
scope_type = COMPILER_SCOPE_FUNCTION;
}
- if (!compiler_check_debug_args(c, args))
- return 0;
-
- if (!compiler_decorators(c, decos))
- return 0;
+ RETURN_IF_ERROR(compiler_check_debug_args(c, args));
+ RETURN_IF_ERROR(compiler_decorators(c, decos));
firstlineno = s->lineno;
if (asdl_seq_LEN(decos)) {
@@ -2661,19 +2648,16 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
location loc = LOC(s);
funcflags = compiler_default_arguments(c, loc, args);
if (funcflags == -1) {
- return 0;
+ return ERROR;
}
annotations = compiler_visit_annotations(c, loc, args, returns);
- if (annotations == 0) {
- return 0;
- }
- else if (annotations > 0) {
+ RETURN_IF_ERROR(annotations);
+ if (annotations > 0) {
funcflags |= 0x04;
}
- if (!compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno));
/* if not -OO mode, add docstring */
if (c->c_optimize < 2) {
@@ -2681,7 +2665,7 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
}
if (compiler_add_const(c, docstring ? docstring : Py_None) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
c->u->u_argcount = asdl_seq_LEN(args->args);
@@ -2691,9 +2675,9 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
VISIT_IN_SCOPE(c, stmt, (stmt_ty)asdl_seq_GET(body, i));
}
if (c->u->u_ste->ste_coroutine || c->u->u_ste->ste_generator) {
- if (!wrap_in_stopiteration_handler(c)) {
+ if (wrap_in_stopiteration_handler(c) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
}
co = assemble(c, 1);
@@ -2702,18 +2686,17 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
if (co == NULL) {
Py_XDECREF(qualname);
Py_XDECREF(co);
- return 0;
+ return ERROR;
}
- if (!compiler_make_closure(c, loc, co, funcflags, qualname)) {
+ if (compiler_make_closure(c, loc, co, funcflags, qualname) < 0) {
Py_DECREF(qualname);
Py_DECREF(co);
- return 0;
+ return ERROR;
}
Py_DECREF(qualname);
Py_DECREF(co);
- if (!compiler_apply_decorators(c, decos))
- return 0;
+ RETURN_IF_ERROR(compiler_apply_decorators(c, decos));
return compiler_nameop(c, loc, name, Store);
}
@@ -2724,8 +2707,7 @@ compiler_class(struct compiler *c, stmt_ty s)
int i, firstlineno;
asdl_expr_seq *decos = s->v.ClassDef.decorator_list;
- if (!compiler_decorators(c, decos))
- return 0;
+ RETURN_IF_ERROR(compiler_decorators(c, decos));
firstlineno = s->lineno;
if (asdl_seq_LEN(decos)) {
@@ -2743,35 +2725,35 @@ compiler_class(struct compiler *c, stmt_ty s)
This borrows from compiler_call.
*/
/* 1. compile the class body into a code object */
- if (!compiler_enter_scope(c, s->v.ClassDef.name,
- COMPILER_SCOPE_CLASS, (void *)s, firstlineno)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_enter_scope(c, s->v.ClassDef.name,
+ COMPILER_SCOPE_CLASS, (void *)s, firstlineno));
+
/* this block represents what we do in the new scope */
{
location loc = LOCATION(firstlineno, firstlineno, 0, 0);
/* use the class name for name mangling */
Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name));
/* load (global) __name__ ... */
- if (!compiler_nameop(c, loc, &_Py_ID(__name__), Load)) {
+ if (compiler_nameop(c, loc, &_Py_ID(__name__), Load) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
/* ... and store it as __module__ */
- if (!compiler_nameop(c, loc, &_Py_ID(__module__), Store)) {
+ if (compiler_nameop(c, loc, &_Py_ID(__module__), Store) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
assert(c->u->u_qualname);
ADDOP_LOAD_CONST(c, loc, c->u->u_qualname);
- if (!compiler_nameop(c, loc, &_Py_ID(__qualname__), Store)) {
+ if (compiler_nameop(c, loc, &_Py_ID(__qualname__), Store) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
/* compile the body proper */
- if (!compiler_body(c, loc, s->v.ClassDef.body)) {
+ if (compiler_body(c, loc, s->v.ClassDef.body) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
/* The following code is artificial */
/* Return __classcell__ if it is referenced, otherwise return None */
@@ -2780,14 +2762,14 @@ compiler_class(struct compiler *c, stmt_ty s)
i = compiler_lookup_arg(c->u->u_cellvars, &_Py_ID(__class__));
if (i < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
assert(i == 0);
ADDOP_I(c, NO_LOCATION, LOAD_CLOSURE, i);
ADDOP_I(c, NO_LOCATION, COPY, 1);
- if (!compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store)) {
+ if (compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
}
else {
@@ -2801,8 +2783,9 @@ compiler_class(struct compiler *c, stmt_ty s)
}
/* leave the new scope */
compiler_exit_scope(c);
- if (co == NULL)
- return 0;
+ if (co == NULL) {
+ return ERROR;
+ }
location loc = LOC(s);
/* 2. load the 'build_class' function */
@@ -2810,9 +2793,9 @@ compiler_class(struct compiler *c, stmt_ty s)
ADDOP(c, loc, LOAD_BUILD_CLASS);
/* 3. load a function (or closure) made from the code object */
- if (!compiler_make_closure(c, loc, co, 0, NULL)) {
+ if (compiler_make_closure(c, loc, co, 0, NULL) < 0) {
Py_DECREF(co);
- return 0;
+ return ERROR;
}
Py_DECREF(co);
@@ -2820,27 +2803,25 @@ compiler_class(struct compiler *c, stmt_ty s)
ADDOP_LOAD_CONST(c, loc, s->v.ClassDef.name);
/* 5. generate the rest of the code for the call */
- if (!compiler_call_helper(c, loc, 2,
- s->v.ClassDef.bases,
- s->v.ClassDef.keywords))
- return 0;
+ RETURN_IF_ERROR(compiler_call_helper(c, loc, 2,
+ s->v.ClassDef.bases,
+ s->v.ClassDef.keywords));
+
/* 6. apply decorators */
- if (!compiler_apply_decorators(c, decos))
- return 0;
+ RETURN_IF_ERROR(compiler_apply_decorators(c, decos));
/* 7. store into */
- if (!compiler_nameop(c, loc, s->v.ClassDef.name, Store))
- return 0;
- return 1;
+ RETURN_IF_ERROR(compiler_nameop(c, loc, s->v.ClassDef.name, Store));
+ return SUCCESS;
}
-/* Return 0 if the expression is a constant value except named singletons.
- Return 1 otherwise. */
-static int
+/* Return false if the expression is a constant value except named singletons.
+ Return true otherwise. */
+static bool
check_is_arg(expr_ty e)
{
if (e->kind != Constant_kind) {
- return 1;
+ return true;
}
PyObject *value = e->v.Constant.value;
return (value == Py_None
@@ -2849,19 +2830,18 @@ check_is_arg(expr_ty e)
|| value == Py_Ellipsis);
}
-/* Check operands of identity chacks ("is" and "is not").
+/* Check operands of identity checks ("is" and "is not").
Emit a warning if any operand is a constant except named singletons.
- Return 0 on error.
*/
static int
check_compare(struct compiler *c, expr_ty e)
{
Py_ssize_t i, n;
- int left = check_is_arg(e->v.Compare.left);
+ bool left = check_is_arg(e->v.Compare.left);
n = asdl_seq_LEN(e->v.Compare.ops);
for (i = 0; i < n; i++) {
cmpop_ty op = (cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i);
- int right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
+ bool right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
if (op == Is || op == IsNot) {
if (!right || !left) {
const char *msg = (op == Is)
@@ -2872,7 +2852,7 @@ check_compare(struct compiler *c, expr_ty e)
}
left = right;
}
- return 1;
+ return SUCCESS;
}
static int compiler_addcompare(struct compiler *c, location loc,
@@ -2900,21 +2880,21 @@ static int compiler_addcompare(struct compiler *c, location loc,
break;
case Is:
ADDOP_I(c, loc, IS_OP, 0);
- return 1;
+ return SUCCESS;
case IsNot:
ADDOP_I(c, loc, IS_OP, 1);
- return 1;
+ return SUCCESS;
case In:
ADDOP_I(c, loc, CONTAINS_OP, 0);
- return 1;
+ return SUCCESS;
case NotIn:
ADDOP_I(c, loc, CONTAINS_OP, 1);
- return 1;
+ return SUCCESS;
default:
Py_UNREACHABLE();
}
ADDOP_I(c, loc, COMPARE_OP, cmp);
- return 1;
+ return SUCCESS;
}
@@ -2941,43 +2921,36 @@ compiler_jump_if(struct compiler *c, location loc,
next2 = new_next2;
}
for (i = 0; i < n; ++i) {
- if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2)) {
- return 0;
- }
- }
- if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond)) {
- return 0;
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2));
}
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond));
if (!SAME_LABEL(next2, next)) {
USE_LABEL(c, next2);
}
- return 1;
+ return SUCCESS;
}
case IfExp_kind: {
NEW_JUMP_TARGET_LABEL(c, end);
NEW_JUMP_TARGET_LABEL(c, next2);
- if (!compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0)) {
- return 0;
- }
- if (!compiler_jump_if(c, loc, e->v.IfExp.body, next, cond)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0));
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, e->v.IfExp.body, next, cond));
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
USE_LABEL(c, next2);
- if (!compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond));
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
case Compare_kind: {
Py_ssize_t n = asdl_seq_LEN(e->v.Compare.ops) - 1;
if (n > 0) {
- if (!check_compare(c, e)) {
- return 0;
- }
+ RETURN_IF_ERROR(check_compare(c, e));
NEW_JUMP_TARGET_LABEL(c, cleanup);
VISIT(c, expr, e->v.Compare.left);
for (Py_ssize_t i = 0; i < n; i++) {
@@ -3001,7 +2974,7 @@ compiler_jump_if(struct compiler *c, location loc,
}
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
/* fallback to general implementation */
break;
@@ -3014,7 +2987,7 @@ compiler_jump_if(struct compiler *c, location loc,
/* general implementation */
VISIT(c, expr, e);
ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
- return 1;
+ return SUCCESS;
}
static int
@@ -3024,9 +2997,9 @@ compiler_ifexp(struct compiler *c, expr_ty e)
NEW_JUMP_TARGET_LABEL(c, end);
NEW_JUMP_TARGET_LABEL(c, next);
- if (!compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0));
+
VISIT(c, expr, e->v.IfExp.body);
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
@@ -3034,7 +3007,7 @@ compiler_ifexp(struct compiler *c, expr_ty e)
VISIT(c, expr, e->v.IfExp.orelse);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3046,8 +3019,7 @@ compiler_lambda(struct compiler *c, expr_ty e)
arguments_ty args = e->v.Lambda.args;
assert(e->kind == Lambda_kind);
- if (!compiler_check_debug_args(c, args))
- return 0;
+ RETURN_IF_ERROR(compiler_check_debug_args(c, args));
location loc = LOC(e);
funcflags = compiler_default_arguments(c, loc, args);
@@ -3056,14 +3028,13 @@ compiler_lambda(struct compiler *c, expr_ty e)
}
_Py_DECLARE_STR(anon_lambda, "");
- if (!compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA,
- (void *)e, e->lineno)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA,
+ (void *)e, e->lineno));
+
/* Make None the first constant, so the lambda can't have a
docstring. */
- if (compiler_add_const(c, Py_None) < 0)
- return 0;
+ RETURN_IF_ERROR(compiler_add_const(c, Py_None));
c->u->u_argcount = asdl_seq_LEN(args->args);
c->u->u_posonlyargcount = asdl_seq_LEN(args->posonlyargs);
@@ -3081,18 +3052,18 @@ compiler_lambda(struct compiler *c, expr_ty e)
compiler_exit_scope(c);
if (co == NULL) {
Py_DECREF(qualname);
- return 0;
+ return ERROR;
}
- if (!compiler_make_closure(c, loc, co, funcflags, qualname)) {
+ if (compiler_make_closure(c, loc, co, funcflags, qualname) < 0) {
Py_DECREF(qualname);
Py_DECREF(co);
- return 0;
+ return ERROR;
}
Py_DECREF(qualname);
Py_DECREF(co);
- return 1;
+ return SUCCESS;
}
static int
@@ -3108,9 +3079,9 @@ compiler_if(struct compiler *c, stmt_ty s)
else {
next = end;
}
- if (!compiler_jump_if(c, LOC(s), s->v.If.test, next, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, LOC(s), s->v.If.test, next, 0));
+
VISIT_SEQ(c, stmt, s->v.If.body);
if (asdl_seq_LEN(s->v.If.orelse)) {
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
@@ -3120,7 +3091,7 @@ compiler_if(struct compiler *c, stmt_ty s)
}
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3132,9 +3103,8 @@ compiler_for(struct compiler *c, stmt_ty s)
NEW_JUMP_TARGET_LABEL(c, cleanup);
NEW_JUMP_TARGET_LABEL(c, end);
- if (!compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL));
+
VISIT(c, expr, s->v.For.iter);
ADDOP(c, loc, GET_ITER);
@@ -3155,7 +3125,7 @@ compiler_for(struct compiler *c, stmt_ty s)
VISIT_SEQ(c, stmt, s->v.For.orelse);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
@@ -3177,9 +3147,8 @@ compiler_async_for(struct compiler *c, stmt_ty s)
ADDOP(c, loc, GET_AITER);
USE_LABEL(c, start);
- if (!compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL));
+
/* SETUP_FINALLY to guard the __anext__ call */
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
ADDOP(c, loc, GET_ANEXT);
@@ -3207,7 +3176,7 @@ compiler_async_for(struct compiler *c, stmt_ty s)
VISIT_SEQ(c, stmt, s->v.For.orelse);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3219,18 +3188,13 @@ compiler_while(struct compiler *c, stmt_ty s)
NEW_JUMP_TARGET_LABEL(c, anchor);
USE_LABEL(c, loop);
- if (!compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL)) {
- return 0;
- }
- if (!compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0)) {
- return 0;
- }
+
+ RETURN_IF_ERROR(compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL));
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0));
USE_LABEL(c, body);
VISIT_SEQ(c, stmt, s->v.While.body);
- if (!compiler_jump_if(c, LOC(s), s->v.While.test, body, 1)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.While.test, body, 1));
compiler_pop_fblock(c, WHILE_LOOP, loop);
@@ -3240,7 +3204,7 @@ compiler_while(struct compiler *c, stmt_ty s)
}
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3249,13 +3213,13 @@ compiler_return(struct compiler *c, stmt_ty s)
location loc = LOC(s);
int preserve_tos = ((s->v.Return.value != NULL) &&
(s->v.Return.value->kind != Constant_kind));
- if (c->u->u_ste->ste_type != FunctionBlock)
+ if (c->u->u_ste->ste_type != FunctionBlock) {
return compiler_error(c, loc, "'return' outside function");
+ }
if (s->v.Return.value != NULL &&
c->u->u_ste->ste_coroutine && c->u->u_ste->ste_generator)
{
- return compiler_error(
- c, loc, "'return' with value in async generator");
+ return compiler_error(c, loc, "'return' with value in async generator");
}
if (preserve_tos) {
@@ -3272,8 +3236,7 @@ compiler_return(struct compiler *c, stmt_ty s)
ADDOP(c, loc, NOP);
}
- if (!compiler_unwind_fblock_stack(c, &loc, preserve_tos, NULL))
- return 0;
+ RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, preserve_tos, NULL));
if (s->v.Return.value == NULL) {
ADDOP_LOAD_CONST(c, loc, Py_None);
}
@@ -3282,7 +3245,7 @@ compiler_return(struct compiler *c, stmt_ty s)
}
ADDOP(c, loc, RETURN_VALUE);
- return 1;
+ return SUCCESS;
}
static int
@@ -3291,17 +3254,13 @@ compiler_break(struct compiler *c, location loc)
struct fblockinfo *loop = NULL;
/* Emit instruction with line number */
ADDOP(c, loc, NOP);
- if (!compiler_unwind_fblock_stack(c, &loc, 0, &loop)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, 0, &loop));
if (loop == NULL) {
return compiler_error(c, loc, "'break' outside loop");
}
- if (!compiler_unwind_fblock(c, &loc, loop, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_unwind_fblock(c, &loc, loop, 0));
ADDOP_JUMP(c, loc, JUMP, loop->fb_exit);
- return 1;
+ return SUCCESS;
}
static int
@@ -3310,14 +3269,12 @@ compiler_continue(struct compiler *c, location loc)
struct fblockinfo *loop = NULL;
/* Emit instruction with line number */
ADDOP(c, loc, NOP);
- if (!compiler_unwind_fblock_stack(c, &loc, 0, &loop)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, 0, &loop));
if (loop == NULL) {
return compiler_error(c, loc, "'continue' not properly in loop");
}
ADDOP_JUMP(c, loc, JUMP, loop->fb_block);
- return 1;
+ return SUCCESS;
}
@@ -3376,11 +3333,12 @@ compiler_try_finally(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_FINALLY, end);
USE_LABEL(c, body);
- if (!compiler_push_fblock(c, loc, FINALLY_TRY, body, end, s->v.Try.finalbody))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, FINALLY_TRY, body, end,
+ s->v.Try.finalbody));
+
if (s->v.Try.handlers && asdl_seq_LEN(s->v.Try.handlers)) {
- if (!compiler_try_except(c, s))
- return 0;
+ RETURN_IF_ERROR(compiler_try_except(c, s));
}
else {
VISIT_SEQ(c, stmt, s->v.Try.body);
@@ -3397,8 +3355,8 @@ compiler_try_finally(struct compiler *c, stmt_ty s)
loc = NO_LOCATION;
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
ADDOP(c, loc, PUSH_EXC_INFO);
- if (!compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL));
VISIT_SEQ(c, stmt, s->v.Try.finalbody);
loc = location_of_last_executing_statement(s->v.Try.finalbody);
compiler_pop_fblock(c, FINALLY_END, end);
@@ -3409,7 +3367,7 @@ compiler_try_finally(struct compiler *c, stmt_ty s)
POP_EXCEPT_AND_RERAISE(c, loc);
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
static int
@@ -3425,13 +3383,12 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_FINALLY, end);
USE_LABEL(c, body);
- if (!compiler_push_fblock(c, loc, FINALLY_TRY, body, end, s->v.TryStar.finalbody)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, FINALLY_TRY, body, end,
+ s->v.TryStar.finalbody));
+
if (s->v.TryStar.handlers && asdl_seq_LEN(s->v.TryStar.handlers)) {
- if (!compiler_try_star_except(c, s)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_try_star_except(c, s));
}
else {
VISIT_SEQ(c, stmt, s->v.TryStar.body);
@@ -3448,9 +3405,9 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s)
loc = NO_LOCATION;
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
ADDOP(c, loc, PUSH_EXC_INFO);
- if (!compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL));
+
VISIT_SEQ(c, stmt, s->v.TryStar.finalbody);
loc = location_of_last_executing_statement(s->v.Try.finalbody);
@@ -3461,7 +3418,7 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s)
POP_EXCEPT_AND_RERAISE(c, loc);
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
@@ -3507,8 +3464,8 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
USE_LABEL(c, body);
- if (!compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL));
VISIT_SEQ(c, stmt, s->v.Try.body);
compiler_pop_fblock(c, TRY_EXCEPT, body);
ADDOP(c, NO_LOCATION, POP_BLOCK);
@@ -3522,9 +3479,11 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup);
ADDOP(c, NO_LOCATION, PUSH_EXC_INFO);
+
/* Runtime will push a block here, so we need to account for that */
- if (!compiler_push_fblock(c, loc, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL));
+
for (i = 0; i < n; i++) {
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
s->v.Try.handlers, i);
@@ -3543,7 +3502,8 @@ compiler_try_except(struct compiler *c, stmt_ty s)
NEW_JUMP_TARGET_LABEL(c, cleanup_end);
NEW_JUMP_TARGET_LABEL(c, cleanup_body);
- compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store));
/*
try:
@@ -3560,10 +3520,9 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end);
USE_LABEL(c, cleanup_body);
- if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
- NO_LABEL, handler->v.ExceptHandler.name)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
+ NO_LABEL, handler->v.ExceptHandler.name));
/* second # body */
VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
@@ -3573,8 +3532,10 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP(c, NO_LOCATION, POP_BLOCK);
ADDOP(c, NO_LOCATION, POP_EXCEPT);
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
/* except: */
@@ -3582,8 +3543,10 @@ compiler_try_except(struct compiler *c, stmt_ty s)
/* name = None; del name; # artificial */
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
ADDOP_I(c, NO_LOCATION, RERAISE, 1);
}
@@ -3593,8 +3556,10 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP(c, loc, POP_TOP); /* exc_value */
USE_LABEL(c, cleanup_body);
- if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, NO_LABEL, NULL))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
+ NO_LABEL, NULL));
+
VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body);
ADDOP(c, NO_LOCATION, POP_BLOCK);
@@ -3612,7 +3577,7 @@ compiler_try_except(struct compiler *c, stmt_ty s)
POP_EXCEPT_AND_RERAISE(c, NO_LOCATION);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
/*
@@ -3679,9 +3644,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
USE_LABEL(c, body);
- if (!compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL));
VISIT_SEQ(c, stmt, s->v.TryStar.body);
compiler_pop_fblock(c, TRY_EXCEPT, body);
ADDOP(c, NO_LOCATION, POP_BLOCK);
@@ -3692,11 +3656,12 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup);
ADDOP(c, NO_LOCATION, PUSH_EXC_INFO);
+
/* Runtime will push a block here, so we need to account for that */
- if (!compiler_push_fblock(c, loc, EXCEPTION_GROUP_HANDLER,
- NO_LABEL, NO_LABEL, "except handler")) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, EXCEPTION_GROUP_HANDLER,
+ NO_LABEL, NO_LABEL, "except handler"));
+
for (Py_ssize_t i = 0; i < n; i++) {
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
s->v.TryStar.handlers, i);
@@ -3736,7 +3701,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
NEW_JUMP_TARGET_LABEL(c, cleanup_body);
if (handler->v.ExceptHandler.name) {
- compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store));
}
else {
ADDOP(c, loc, POP_TOP); // match
@@ -3756,9 +3722,9 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end);
USE_LABEL(c, cleanup_body);
- if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, NO_LABEL, handler->v.ExceptHandler.name)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
+ NO_LABEL, handler->v.ExceptHandler.name));
/* second # body */
VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
@@ -3767,8 +3733,10 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
ADDOP(c, NO_LOCATION, POP_BLOCK);
if (handler->v.ExceptHandler.name) {
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
}
ADDOP_JUMP(c, NO_LOCATION, JUMP, except);
@@ -3778,8 +3746,10 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
/* name = None; del name; # artificial */
if (handler->v.ExceptHandler.name) {
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
}
/* add exception raised to the res list */
@@ -3823,7 +3793,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
VISIT_SEQ(c, stmt, s->v.TryStar.orelse);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3857,19 +3827,22 @@ compiler_import_as(struct compiler *c, location loc,
*/
Py_ssize_t len = PyUnicode_GET_LENGTH(name);
Py_ssize_t dot = PyUnicode_FindChar(name, '.', 0, len, 1);
- if (dot == -2)
- return 0;
+ if (dot == -2) {
+ return ERROR;
+ }
if (dot != -1) {
/* Consume the base module name to get the first attribute */
while (1) {
Py_ssize_t pos = dot + 1;
PyObject *attr;
dot = PyUnicode_FindChar(name, '.', pos, len, 1);
- if (dot == -2)
- return 0;
+ if (dot == -2) {
+ return ERROR;
+ }
attr = PyUnicode_Substring(name, pos, (dot != -1) ? dot : len);
- if (!attr)
- return 0;
+ if (!attr) {
+ return ERROR;
+ }
ADDOP_N(c, loc, IMPORT_FROM, attr, names);
if (dot == -1) {
break;
@@ -3877,11 +3850,9 @@ compiler_import_as(struct compiler *c, location loc,
ADDOP_I(c, loc, SWAP, 2);
ADDOP(c, loc, POP_TOP);
}
- if (!compiler_nameop(c, loc, asname, Store)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_nameop(c, loc, asname, Store));
ADDOP(c, loc, POP_TOP);
- return 1;
+ return SUCCESS;
}
return compiler_nameop(c, loc, asname, Store);
}
@@ -3910,8 +3881,7 @@ compiler_import(struct compiler *c, stmt_ty s)
if (alias->asname) {
r = compiler_import_as(c, loc, alias->name, alias->asname);
- if (!r)
- return r;
+ RETURN_IF_ERROR(r);
}
else {
identifier tmp = alias->name;
@@ -3919,18 +3889,18 @@ compiler_import(struct compiler *c, stmt_ty s)
alias->name, '.', 0, PyUnicode_GET_LENGTH(alias->name), 1);
if (dot != -1) {
tmp = PyUnicode_Substring(alias->name, 0, dot);
- if (tmp == NULL)
- return 0;
+ if (tmp == NULL) {
+ return ERROR;
+ }
}
r = compiler_nameop(c, loc, tmp, Store);
if (dot != -1) {
Py_DECREF(tmp);
}
- if (!r)
- return r;
+ RETURN_IF_ERROR(r);
}
}
- return 1;
+ return SUCCESS;
}
static int
@@ -3942,7 +3912,7 @@ compiler_from_import(struct compiler *c, stmt_ty s)
PyObject *names = PyTuple_New(n);
if (!names) {
- return 0;
+ return ERROR;
}
/* build up the names */
@@ -3975,7 +3945,7 @@ compiler_from_import(struct compiler *c, stmt_ty s)
if (i == 0 && PyUnicode_READ_CHAR(alias->name, 0) == '*') {
assert(n == 1);
ADDOP(c, LOC(s), IMPORT_STAR);
- return 1;
+ return SUCCESS;
}
ADDOP_NAME(c, LOC(s), IMPORT_FROM, alias->name, names);
@@ -3984,13 +3954,11 @@ compiler_from_import(struct compiler *c, stmt_ty s)
store_name = alias->asname;
}
- if (!compiler_nameop(c, LOC(s), store_name, Store)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_nameop(c, LOC(s), store_name, Store));
}
/* remove imported module */
ADDOP(c, LOC(s), POP_TOP);
- return 1;
+ return SUCCESS;
}
static int
@@ -4003,19 +3971,15 @@ compiler_assert(struct compiler *c, stmt_ty s)
PyTuple_Check(s->v.Assert.test->v.Constant.value) &&
PyTuple_Size(s->v.Assert.test->v.Constant.value) > 0))
{
- if (!compiler_warn(c, LOC(s), "assertion is always true, "
- "perhaps remove parentheses?"))
- {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_warn(c, LOC(s), "assertion is always true, "
+ "perhaps remove parentheses?"));
}
if (c->c_optimize) {
- return 1;
+ return SUCCESS;
}
NEW_JUMP_TARGET_LABEL(c, end);
- if (!compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1));
ADDOP(c, LOC(s), LOAD_ASSERTION_ERROR);
if (s->v.Assert.msg) {
VISIT(c, expr, s->v.Assert.msg);
@@ -4024,7 +3988,7 @@ compiler_assert(struct compiler *c, stmt_ty s)
ADDOP_I(c, LOC(s), RAISE_VARARGS, 1);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -4033,18 +3997,18 @@ compiler_stmt_expr(struct compiler *c, location loc, expr_ty value)
if (c->c_interactive && c->c_nestlevel <= 1) {
VISIT(c, expr, value);
ADDOP(c, loc, PRINT_EXPR);
- return 1;
+ return SUCCESS;
}
if (value->kind == Constant_kind) {
/* ignore constant statement */
ADDOP(c, loc, NOP);
- return 1;
+ return SUCCESS;
}
VISIT(c, expr, value);
ADDOP(c, NO_LOCATION, POP_TOP); /* artificial */
- return 1;
+ return SUCCESS;
}
static int
@@ -4140,7 +4104,7 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
return compiler_async_for(c, s);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4210,10 +4174,10 @@ addop_binary(struct compiler *c, location loc, operator_ty binop,
default:
PyErr_Format(PyExc_SystemError, "%s op %d should not be possible",
inplace ? "inplace" : "binary", binop);
- return 0;
+ return ERROR;
}
ADDOP_I(c, loc, BINARY_OP, oparg);
- return 1;
+ return SUCCESS;
}
@@ -4224,7 +4188,7 @@ addop_yield(struct compiler *c, location loc) {
}
ADDOP_I(c, loc, YIELD_VALUE, 0);
ADDOP_I(c, loc, RESUME, 1);
- return 1;
+ return SUCCESS;
}
static int
@@ -4242,12 +4206,14 @@ compiler_nameop(struct compiler *c, location loc,
!_PyUnicode_EqualToASCIIString(name, "True") &&
!_PyUnicode_EqualToASCIIString(name, "False"));
- if (forbidden_name(c, loc, name, ctx))
- return 0;
+ if (forbidden_name(c, loc, name, ctx)) {
+ return ERROR;
+ }
mangled = _Py_Mangle(c->u->u_private, name);
- if (!mangled)
- return 0;
+ if (!mangled) {
+ return ERROR;
+ }
op = 0;
optype = OP_NAME;
@@ -4297,7 +4263,7 @@ compiler_nameop(struct compiler *c, location loc,
case Del: op = DELETE_FAST; break;
}
ADDOP_N(c, loc, op, mangled, varnames);
- return 1;
+ return SUCCESS;
case OP_GLOBAL:
switch (ctx) {
case Load: op = LOAD_GLOBAL; break;
@@ -4318,7 +4284,7 @@ compiler_nameop(struct compiler *c, location loc,
arg = dict_add_o(dict, mangled);
Py_DECREF(mangled);
if (arg < 0) {
- return 0;
+ return ERROR;
}
if (op == LOAD_GLOBAL) {
arg <<= 1;
@@ -4353,7 +4319,7 @@ compiler_boolop(struct compiler *c, expr_ty e)
VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n));
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -4365,7 +4331,7 @@ starunpack_helper(struct compiler *c, location loc,
if (n > 2 && are_all_items_const(elts, 0, n)) {
PyObject *folded = PyTuple_New(n);
if (folded == NULL) {
- return 0;
+ return ERROR;
}
PyObject *val;
for (Py_ssize_t i = 0; i < n; i++) {
@@ -4378,7 +4344,7 @@ starunpack_helper(struct compiler *c, location loc,
if (add == SET_ADD) {
Py_SETREF(folded, PyFrozenSet_New(folded));
if (folded == NULL) {
- return 0;
+ return ERROR;
}
}
ADDOP_I(c, loc, build, pushed);
@@ -4388,7 +4354,7 @@ starunpack_helper(struct compiler *c, location loc,
ADDOP(c, loc, LIST_TO_TUPLE);
}
}
- return 1;
+ return SUCCESS;
}
int big = n+pushed > STACK_USE_GUIDELINE;
@@ -4410,7 +4376,7 @@ starunpack_helper(struct compiler *c, location loc,
} else {
ADDOP_I(c, loc, build, n+pushed);
}
- return 1;
+ return SUCCESS;
}
int sequence_built = 0;
if (big) {
@@ -4438,7 +4404,7 @@ starunpack_helper(struct compiler *c, location loc,
if (tuple) {
ADDOP(c, loc, LIST_TO_TUPLE);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4450,10 +4416,11 @@ unpack_helper(struct compiler *c, location loc, asdl_expr_seq *elts)
expr_ty elt = asdl_seq_GET(elts, i);
if (elt->kind == Starred_kind && !seen_star) {
if ((i >= (1 << 8)) ||
- (n-i-1 >= (INT_MAX >> 8)))
+ (n-i-1 >= (INT_MAX >> 8))) {
return compiler_error(c, loc,
"too many expressions in "
"star-unpacking assignment");
+ }
ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8)));
seen_star = 1;
}
@@ -4465,19 +4432,19 @@ unpack_helper(struct compiler *c, location loc, asdl_expr_seq *elts)
if (!seen_star) {
ADDOP_I(c, loc, UNPACK_SEQUENCE, n);
}
- return 1;
+ return SUCCESS;
}
static int
assignment_helper(struct compiler *c, location loc, asdl_expr_seq *elts)
{
Py_ssize_t n = asdl_seq_LEN(elts);
- RETURN_IF_FALSE(unpack_helper(c, loc, elts));
+ RETURN_IF_ERROR(unpack_helper(c, loc, elts));
for (Py_ssize_t i = 0; i < n; i++) {
expr_ty elt = asdl_seq_GET(elts, i);
VISIT(c, expr, elt->kind != Starred_kind ? elt : elt->v.Starred.value);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4492,9 +4459,10 @@ compiler_list(struct compiler *c, expr_ty e)
return starunpack_helper(c, loc, elts, 0,
BUILD_LIST, LIST_APPEND, LIST_EXTEND, 0);
}
- else
+ else {
VISIT_SEQ(c, expr, elts);
- return 1;
+ }
+ return SUCCESS;
}
static int
@@ -4509,9 +4477,10 @@ compiler_tuple(struct compiler *c, expr_ty e)
return starunpack_helper(c, loc, elts, 0,
BUILD_LIST, LIST_APPEND, LIST_EXTEND, 1);
}
- else
+ else {
VISIT_SEQ(c, expr, elts);
- return 1;
+ }
+ return SUCCESS;
}
static int
@@ -4522,16 +4491,16 @@ compiler_set(struct compiler *c, expr_ty e)
BUILD_SET, SET_ADD, SET_UPDATE, 0);
}
-static int
+static bool
are_all_items_const(asdl_expr_seq *seq, Py_ssize_t begin, Py_ssize_t end)
{
- Py_ssize_t i;
- for (i = begin; i < end; i++) {
+ for (Py_ssize_t i = begin; i < end; i++) {
expr_ty key = (expr_ty)asdl_seq_GET(seq, i);
- if (key == NULL || key->kind != Constant_kind)
- return 0;
+ if (key == NULL || key->kind != Constant_kind) {
+ return false;
+ }
}
- return 1;
+ return true;
}
static int
@@ -4547,7 +4516,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end
}
keys = PyTuple_New(n);
if (keys == NULL) {
- return 0;
+ return ERROR;
}
for (i = begin; i < end; i++) {
key = ((expr_ty)asdl_seq_GET(e->v.Dict.keys, i))->v.Constant.value;
@@ -4555,7 +4524,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end
}
ADDOP_LOAD_CONST_NEW(c, loc, keys);
ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n);
- return 1;
+ return SUCCESS;
}
if (big) {
ADDOP_I(c, loc, BUILD_MAP, 0);
@@ -4570,7 +4539,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end
if (!big) {
ADDOP_I(c, loc, BUILD_MAP, n);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4587,9 +4556,7 @@ compiler_dict(struct compiler *c, expr_ty e)
is_unpacking = (expr_ty)asdl_seq_GET(e->v.Dict.keys, i) == NULL;
if (is_unpacking) {
if (elements) {
- if (!compiler_subdict(c, e, i - elements, i)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subdict(c, e, i - elements, i));
if (have_dict) {
ADDOP_I(c, loc, DICT_UPDATE, 1);
}
@@ -4605,9 +4572,7 @@ compiler_dict(struct compiler *c, expr_ty e)
}
else {
if (elements*2 > STACK_USE_GUIDELINE) {
- if (!compiler_subdict(c, e, i - elements, i + 1)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subdict(c, e, i - elements, i + 1));
if (have_dict) {
ADDOP_I(c, loc, DICT_UPDATE, 1);
}
@@ -4620,9 +4585,7 @@ compiler_dict(struct compiler *c, expr_ty e)
}
}
if (elements) {
- if (!compiler_subdict(c, e, n - elements, n)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subdict(c, e, n - elements, n));
if (have_dict) {
ADDOP_I(c, loc, DICT_UPDATE, 1);
}
@@ -4631,7 +4594,7 @@ compiler_dict(struct compiler *c, expr_ty e)
if (!have_dict) {
ADDOP_I(c, loc, BUILD_MAP, 0);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4640,9 +4603,7 @@ compiler_compare(struct compiler *c, expr_ty e)
location loc = LOC(e);
Py_ssize_t i, n;
- if (!check_compare(c, e)) {
- return 0;
- }
+ RETURN_IF_ERROR(check_compare(c, e));
VISIT(c, expr, e->v.Compare.left);
assert(asdl_seq_LEN(e->v.Compare.ops) > 0);
n = asdl_seq_LEN(e->v.Compare.ops) - 1;
@@ -4671,7 +4632,7 @@ compiler_compare(struct compiler *c, expr_ty e)
USE_LABEL(c, end);
}
- return 1;
+ return SUCCESS;
}
static PyTypeObject *
@@ -4724,7 +4685,7 @@ check_caller(struct compiler *c, expr_ty e)
infer_type(e)->tp_name);
}
default:
- return 1;
+ return SUCCESS;
}
}
@@ -4740,7 +4701,7 @@ check_subscripter(struct compiler *c, expr_ty e)
PyLong_Check(v) || PyFloat_Check(v) || PyComplex_Check(v) ||
PyAnySet_Check(v)))
{
- return 1;
+ return SUCCESS;
}
/* fall through */
case Set_kind:
@@ -4753,7 +4714,7 @@ check_subscripter(struct compiler *c, expr_ty e)
infer_type(e)->tp_name);
}
default:
- return 1;
+ return SUCCESS;
}
}
@@ -4766,14 +4727,14 @@ check_index(struct compiler *c, expr_ty e, expr_ty s)
if (index_type == NULL
|| PyType_FastSubclass(index_type, Py_TPFLAGS_LONG_SUBCLASS)
|| index_type == &PySlice_Type) {
- return 1;
+ return SUCCESS;
}
switch (e->kind) {
case Constant_kind:
v = e->v.Constant.value;
if (!(PyUnicode_Check(v) || PyBytes_Check(v) || PyTuple_Check(v))) {
- return 1;
+ return SUCCESS;
}
/* fall through */
case Tuple_kind:
@@ -4789,7 +4750,7 @@ check_index(struct compiler *c, expr_ty e, expr_ty s)
index_type->tp_name);
}
default:
- return 1;
+ return SUCCESS;
}
}
@@ -4838,7 +4799,7 @@ update_start_location_to_match_attr(struct compiler *c, location loc,
return loc;
}
-// Return 1 if the method call was optimized, -1 if not, and 0 on error.
+// Return 1 if the method call was optimized, 0 if not, and -1 on error.
static int
maybe_optimize_method_call(struct compiler *c, expr_ty e)
{
@@ -4849,32 +4810,32 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e)
/* Check that the call node is an attribute access */
if (meth->kind != Attribute_kind || meth->v.Attribute.ctx != Load) {
- return -1;
+ return 0;
}
/* Check that the base object is not something that is imported */
if (is_import_originated(c, meth->v.Attribute.value)) {
- return -1;
+ return 0;
}
/* Check that there aren't too many arguments */
argsl = asdl_seq_LEN(args);
kwdsl = asdl_seq_LEN(kwds);
if (argsl + kwdsl + (kwdsl != 0) >= STACK_USE_GUIDELINE) {
- return -1;
+ return 0;
}
/* Check that there are no *varargs types of arguments. */
for (i = 0; i < argsl; i++) {
expr_ty elt = asdl_seq_GET(args, i);
if (elt->kind == Starred_kind) {
- return -1;
+ return 0;
}
}
for (i = 0; i < kwdsl; i++) {
keyword_ty kw = asdl_seq_GET(kwds, i);
if (kw->arg == NULL) {
- return -1;
+ return 0;
}
}
/* Alright, we can optimize the code. */
@@ -4886,9 +4847,8 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e)
if (kwdsl) {
VISIT_SEQ(c, keyword, kwds);
- if (!compiler_call_simple_kw_helper(c, loc, kwds, kwdsl)) {
- return 0;
- };
+ RETURN_IF_ERROR(
+ compiler_call_simple_kw_helper(c, loc, kwds, kwdsl));
}
loc = update_start_location_to_match_attr(c, LOC(e), meth);
ADDOP_I(c, loc, CALL, argsl + kwdsl);
@@ -4906,32 +4866,31 @@ validate_keywords(struct compiler *c, asdl_keyword_seq *keywords)
}
location loc = LOC(key);
if (forbidden_name(c, loc, key->arg, Store)) {
- return -1;
+ return ERROR;
}
for (Py_ssize_t j = i + 1; j < nkeywords; j++) {
keyword_ty other = ((keyword_ty)asdl_seq_GET(keywords, j));
if (other->arg && !PyUnicode_Compare(key->arg, other->arg)) {
compiler_error(c, LOC(other), "keyword argument repeated: %U", key->arg);
- return -1;
+ return ERROR;
}
}
}
- return 0;
+ return SUCCESS;
}
static int
compiler_call(struct compiler *c, expr_ty e)
{
- if (validate_keywords(c, e->v.Call.keywords) == -1) {
- return 0;
- }
+ RETURN_IF_ERROR(validate_keywords(c, e->v.Call.keywords));
int ret = maybe_optimize_method_call(c, e);
- if (ret >= 0) {
- return ret;
+ if (ret < 0) {
+ return ERROR;
}
- if (!check_caller(c, e->v.Call.func)) {
- return 0;
+ if (ret == 1) {
+ return SUCCESS;
}
+ RETURN_IF_ERROR(check_caller(c, e->v.Call.func));
location loc = LOC(e->v.Call.func);
ADDOP(c, loc, PUSH_NULL);
VISIT(c, expr, e->v.Call.func);
@@ -4963,7 +4922,7 @@ compiler_joined_str(struct compiler *c, expr_ty e)
ADDOP_I(c, loc, BUILD_STRING, asdl_seq_LEN(e->v.JoinedStr.values));
}
}
- return 1;
+ return SUCCESS;
}
/* Used to implement f-strings. Format a single value. */
@@ -4998,7 +4957,7 @@ compiler_formatted_value(struct compiler *c, expr_ty e)
default:
PyErr_Format(PyExc_SystemError,
"Unrecognized conversion character %d", conversion);
- return 0;
+ return ERROR;
}
if (e->v.FormattedValue.format_spec) {
/* Evaluate the format spec, and update our opcode arg. */
@@ -5010,7 +4969,7 @@ compiler_formatted_value(struct compiler *c, expr_ty e)
location loc = LOC(e);
ADDOP_I(c, loc, FORMAT_VALUE, oparg);
- return 1;
+ return SUCCESS;
}
static int
@@ -5030,7 +4989,7 @@ compiler_subkwargs(struct compiler *c, location loc,
}
keys = PyTuple_New(n);
if (keys == NULL) {
- return 0;
+ return ERROR;
}
for (i = begin; i < end; i++) {
key = ((keyword_ty) asdl_seq_GET(keywords, i))->arg;
@@ -5038,7 +4997,7 @@ compiler_subkwargs(struct compiler *c, location loc,
}
ADDOP_LOAD_CONST_NEW(c, loc, keys);
ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n);
- return 1;
+ return SUCCESS;
}
if (big) {
ADDOP_I(c, NO_LOCATION, BUILD_MAP, 0);
@@ -5054,12 +5013,11 @@ compiler_subkwargs(struct compiler *c, location loc,
if (!big) {
ADDOP_I(c, loc, BUILD_MAP, n);
}
- return 1;
+ return SUCCESS;
}
/* Used by compiler_call_helper and maybe_optimize_method_call to emit
* KW_NAMES before CALL.
- * Returns 1 on success, 0 on error.
*/
static int
compiler_call_simple_kw_helper(struct compiler *c, location loc,
@@ -5068,7 +5026,7 @@ compiler_call_simple_kw_helper(struct compiler *c, location loc,
PyObject *names;
names = PyTuple_New(nkwelts);
if (names == NULL) {
- return 0;
+ return ERROR;
}
for (int i = 0; i < nkwelts; i++) {
keyword_ty kw = asdl_seq_GET(keywords, i);
@@ -5076,11 +5034,11 @@ compiler_call_simple_kw_helper(struct compiler *c, location loc,
}
Py_ssize_t arg = compiler_add_const(c, names);
if (arg < 0) {
- return 0;
+ return ERROR;
}
Py_DECREF(names);
ADDOP_I(c, loc, KW_NAMES, arg);
- return 1;
+ return SUCCESS;
}
@@ -5093,9 +5051,7 @@ compiler_call_helper(struct compiler *c, location loc,
{
Py_ssize_t i, nseen, nelts, nkwelts;
- if (validate_keywords(c, keywords) == -1) {
- return 0;
- }
+ RETURN_IF_ERROR(validate_keywords(c, keywords));
nelts = asdl_seq_LEN(args);
nkwelts = asdl_seq_LEN(keywords);
@@ -5124,12 +5080,11 @@ compiler_call_helper(struct compiler *c, location loc,
}
if (nkwelts) {
VISIT_SEQ(c, keyword, keywords);
- if (!compiler_call_simple_kw_helper(c, loc, keywords, nkwelts)) {
- return 0;
- };
+ RETURN_IF_ERROR(
+ compiler_call_simple_kw_helper(c, loc, keywords, nkwelts));
}
ADDOP_I(c, loc, CALL, n + nelts + nkwelts);
- return 1;
+ return SUCCESS;
ex_call:
@@ -5138,8 +5093,8 @@ compiler_call_helper(struct compiler *c, location loc,
VISIT(c, expr, ((expr_ty)asdl_seq_GET(args, 0))->v.Starred.value);
}
else if (starunpack_helper(c, loc, args, n, BUILD_LIST,
- LIST_APPEND, LIST_EXTEND, 1) == 0) {
- return 0;
+ LIST_APPEND, LIST_EXTEND, 1) < 0) {
+ return ERROR;
}
/* Then keyword arguments */
if (nkwelts) {
@@ -5152,9 +5107,7 @@ compiler_call_helper(struct compiler *c, location loc,
if (kw->arg == NULL) {
/* A keyword argument unpacking. */
if (nseen) {
- if (!compiler_subkwargs(c, loc, keywords, i - nseen, i)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subkwargs(c, loc, keywords, i - nseen, i));
if (have_dict) {
ADDOP_I(c, loc, DICT_MERGE, 1);
}
@@ -5174,9 +5127,7 @@ compiler_call_helper(struct compiler *c, location loc,
}
if (nseen) {
/* Pack up any trailing keyword arguments. */
- if (!compiler_subkwargs(c, loc, keywords, nkwelts - nseen, nkwelts)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subkwargs(c, loc, keywords, nkwelts - nseen, nkwelts));
if (have_dict) {
ADDOP_I(c, loc, DICT_MERGE, 1);
}
@@ -5185,7 +5136,7 @@ compiler_call_helper(struct compiler *c, location loc,
assert(have_dict);
}
ADDOP_I(c, loc, CALL_FUNCTION_EX, nkwelts > 0);
- return 1;
+ return SUCCESS;
}
@@ -5280,17 +5231,14 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc,
Py_ssize_t n = asdl_seq_LEN(gen->ifs);
for (Py_ssize_t i = 0; i < n; i++) {
expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
- if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_jump_if(c, loc, e, if_cleanup, 0));
}
if (++gen_index < asdl_seq_LEN(generators)) {
- if (!compiler_comprehension_generator(c, loc,
- generators, gen_index, depth,
- elt, val, type)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_comprehension_generator(c, loc,
+ generators, gen_index, depth,
+ elt, val, type));
}
location elt_loc = LOC(elt);
@@ -5324,7 +5272,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc,
ADDOP_I(c, elt_loc, MAP_ADD, depth + 1);
break;
default:
- return 0;
+ return ERROR;
}
}
@@ -5336,7 +5284,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc,
ADDOP(c, NO_LOCATION, END_FOR);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -5365,10 +5313,9 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
USE_LABEL(c, start);
/* Runtime will push a block here, so we need to account for that */
- if (!compiler_push_fblock(c, loc, ASYNC_COMPREHENSION_GENERATOR,
- start, NO_LABEL, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, ASYNC_COMPREHENSION_GENERATOR,
+ start, NO_LABEL, NULL));
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
ADDOP(c, loc, GET_ANEXT);
@@ -5380,18 +5327,15 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
Py_ssize_t n = asdl_seq_LEN(gen->ifs);
for (Py_ssize_t i = 0; i < n; i++) {
expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
- if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_jump_if(c, loc, e, if_cleanup, 0));
}
depth++;
if (++gen_index < asdl_seq_LEN(generators)) {
- if (!compiler_comprehension_generator(c, loc,
- generators, gen_index, depth,
- elt, val, type)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_comprehension_generator(c, loc,
+ generators, gen_index, depth,
+ elt, val, type));
}
location elt_loc = LOC(elt);
@@ -5424,7 +5368,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
ADDOP_I(c, elt_loc, MAP_ADD, depth + 1);
break;
default:
- return 0;
+ return ERROR;
}
}
@@ -5437,7 +5381,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
ADDOP(c, loc, END_ASYNC_FOR);
- return 1;
+ return SUCCESS;
}
static int
@@ -5453,8 +5397,8 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
int is_top_level_await = IS_TOP_LEVEL_AWAIT(c);
outermost = (comprehension_ty) asdl_seq_GET(generators, 0);
- if (!compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION,
- (void *)e, e->lineno))
+ if (compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION,
+ (void *)e, e->lineno) < 0)
{
goto error;
}
@@ -5493,8 +5437,8 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
ADDOP_I(c, loc, op, 0);
}
- if (!compiler_comprehension_generator(c, loc, generators, 0, 0,
- elt, val, type)) {
+ if (compiler_comprehension_generator(c, loc, generators, 0, 0,
+ elt, val, type) < 0) {
goto error_in_scope;
}
@@ -5502,7 +5446,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
ADDOP(c, LOC(e), RETURN_VALUE);
}
if (type == COMP_GENEXP) {
- if (!wrap_in_stopiteration_handler(c)) {
+ if (wrap_in_stopiteration_handler(c) < 0) {
goto error_in_scope;
}
}
@@ -5513,11 +5457,12 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
if (is_top_level_await && is_async_generator){
c->u->u_ste->ste_coroutine = 1;
}
- if (co == NULL)
+ if (co == NULL) {
goto error;
+ }
loc = LOC(e);
- if (!compiler_make_closure(c, loc, co, 0, qualname)) {
+ if (compiler_make_closure(c, loc, co, 0, qualname) < 0) {
goto error;
}
Py_DECREF(qualname);
@@ -5540,13 +5485,13 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
ADD_YIELD_FROM(c, loc, 1);
}
- return 1;
+ return SUCCESS;
error_in_scope:
compiler_exit_scope(c);
error:
Py_XDECREF(qualname);
Py_XDECREF(co);
- return 0;
+ return ERROR;
}
static int
@@ -5595,7 +5540,7 @@ static int
compiler_visit_keyword(struct compiler *c, keyword_ty k)
{
VISIT(c, expr, k->value);
- return 1;
+ return SUCCESS;
}
@@ -5618,7 +5563,7 @@ compiler_with_except_finish(struct compiler *c, jump_target_label cleanup) {
POP_EXCEPT_AND_RERAISE(c, NO_LOCATION);
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
/*
@@ -5675,15 +5620,13 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos)
/* SETUP_WITH pushes a finally block. */
USE_LABEL(c, block);
- if (!compiler_push_fblock(c, loc, ASYNC_WITH, block, final, s)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_push_fblock(c, loc, ASYNC_WITH, block, final, s));
if (item->optional_vars) {
VISIT(c, expr, item->optional_vars);
}
else {
- /* Discard result from context.__aenter__() */
+ /* Discard result from context.__aenter__() */
ADDOP(c, loc, POP_TOP);
}
@@ -5692,8 +5635,8 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos)
/* BLOCK code */
VISIT_SEQ(c, stmt, s->v.AsyncWith.body)
}
- else if (!compiler_async_with(c, s, pos)) {
- return 0;
+ else {
+ RETURN_IF_ERROR(compiler_async_with(c, s, pos));
}
compiler_pop_fblock(c, ASYNC_WITH, block);
@@ -5704,8 +5647,7 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos)
/* For successful outcome:
* call __exit__(None, None, None)
*/
- if(!compiler_call_exit_with_nones(c, loc))
- return 0;
+ RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc));
ADDOP_I(c, loc, GET_AWAITABLE, 2);
ADDOP_LOAD_CONST(c, loc, Py_None);
ADD_YIELD_FROM(c, loc, 1);
@@ -5723,10 +5665,10 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos)
ADDOP_I(c, loc, GET_AWAITABLE, 2);
ADDOP_LOAD_CONST(c, loc, Py_None);
ADD_YIELD_FROM(c, loc, 1);
- compiler_with_except_finish(c, cleanup);
+ RETURN_IF_ERROR(compiler_with_except_finish(c, cleanup));
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
@@ -5772,9 +5714,7 @@ compiler_with(struct compiler *c, stmt_ty s, int pos)
/* SETUP_WITH pushes a finally block. */
USE_LABEL(c, block);
- if (!compiler_push_fblock(c, loc, WITH, block, final, s)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_push_fblock(c, loc, WITH, block, final, s));
if (item->optional_vars) {
VISIT(c, expr, item->optional_vars);
@@ -5785,11 +5725,13 @@ compiler_with(struct compiler *c, stmt_ty s, int pos)
}
pos++;
- if (pos == asdl_seq_LEN(s->v.With.items))
+ if (pos == asdl_seq_LEN(s->v.With.items)) {
/* BLOCK code */
VISIT_SEQ(c, stmt, s->v.With.body)
- else if (!compiler_with(c, s, pos))
- return 0;
+ }
+ else {
+ RETURN_IF_ERROR(compiler_with(c, s, pos));
+ }
ADDOP(c, NO_LOCATION, POP_BLOCK);
compiler_pop_fblock(c, WITH, block);
@@ -5800,8 +5742,7 @@ compiler_with(struct compiler *c, stmt_ty s, int pos)
* call __exit__(None, None, None)
*/
loc = LOC(s);
- if (!compiler_call_exit_with_nones(c, loc))
- return 0;
+ RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc));
ADDOP(c, loc, POP_TOP);
ADDOP_JUMP(c, loc, JUMP, exit);
@@ -5811,10 +5752,10 @@ compiler_with(struct compiler *c, stmt_ty s, int pos)
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
ADDOP(c, loc, PUSH_EXC_INFO);
ADDOP(c, loc, WITH_EXCEPT_START);
- compiler_with_except_finish(c, cleanup);
+ RETURN_IF_ERROR(compiler_with_except_finish(c, cleanup));
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
static int
@@ -5855,8 +5796,9 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
case DictComp_kind:
return compiler_dictcomp(c, e);
case Yield_kind:
- if (c->u->u_ste->ste_type != FunctionBlock)
+ if (c->u->u_ste->ste_type != FunctionBlock) {
return compiler_error(c, loc, "'yield' outside function");
+ }
if (e->v.Yield.value) {
VISIT(c, expr, e->v.Yield.value);
}
@@ -5866,12 +5808,12 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
ADDOP_YIELD(c, loc);
break;
case YieldFrom_kind:
- if (c->u->u_ste->ste_type != FunctionBlock)
+ if (c->u->u_ste->ste_type != FunctionBlock) {
return compiler_error(c, loc, "'yield' outside function");
-
- if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION)
+ }
+ if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) {
return compiler_error(c, loc, "'yield from' inside async function");
-
+ }
VISIT(c, expr, e->v.YieldFrom.value);
ADDOP(c, loc, GET_YIELD_FROM_ITER);
ADDOP_LOAD_CONST(c, loc, Py_None);
@@ -5884,7 +5826,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
}
if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION &&
- c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION){
+ c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION) {
return compiler_error(c, loc, "'await' outside async function");
}
}
@@ -5916,7 +5858,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
break;
case Store:
if (forbidden_name(c, loc, e->v.Attribute.attr, e->v.Attribute.ctx)) {
- return 0;
+ return ERROR;
}
ADDOP_NAME(c, loc, STORE_ATTR, e->v.Attribute.attr, names);
break;
@@ -5942,9 +5884,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
case Slice_kind:
{
int n = compiler_slice(c, e);
- if (n == 0) {
- return 0;
- }
+ RETURN_IF_ERROR(n);
ADDOP_I(c, loc, BUILD_SLICE, n);
break;
}
@@ -5956,7 +5896,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
case Tuple_kind:
return compiler_tuple(c, e);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -5991,9 +5931,7 @@ compiler_augassign(struct compiler *c, stmt_ty s)
case Subscript_kind:
VISIT(c, expr, e->v.Subscript.value);
if (is_two_element_slice(e->v.Subscript.slice)) {
- if (!compiler_slice(c, e->v.Subscript.slice)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_slice(c, e->v.Subscript.slice));
ADDOP_I(c, loc, COPY, 3);
ADDOP_I(c, loc, COPY, 3);
ADDOP_I(c, loc, COPY, 3);
@@ -6007,14 +5945,13 @@ compiler_augassign(struct compiler *c, stmt_ty s)
}
break;
case Name_kind:
- if (!compiler_nameop(c, loc, e->v.Name.id, Load))
- return 0;
+ RETURN_IF_ERROR(compiler_nameop(c, loc, e->v.Name.id, Load));
break;
default:
PyErr_Format(PyExc_SystemError,
"invalid node type (%d) for augmented assignment",
e->kind);
- return 0;
+ return ERROR;
}
loc = LOC(s);
@@ -6048,7 +5985,7 @@ compiler_augassign(struct compiler *c, stmt_ty s)
default:
Py_UNREACHABLE();
}
- return 1;
+ return SUCCESS;
}
static int
@@ -6056,7 +5993,7 @@ check_ann_expr(struct compiler *c, expr_ty e)
{
VISIT(c, expr, e);
ADDOP(c, LOC(e), POP_TOP);
- return 1;
+ return SUCCESS;
}
static int
@@ -6065,7 +6002,7 @@ check_annotation(struct compiler *c, stmt_ty s)
/* Annotations of complex targets does not produce anything
under annotations future */
if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) {
- return 1;
+ return SUCCESS;
}
/* Annotations are only evaluated in a module or class. */
@@ -6073,7 +6010,7 @@ check_annotation(struct compiler *c, stmt_ty s)
c->u->u_scope_type == COMPILER_SCOPE_CLASS) {
return check_ann_expr(c, s->v.AnnAssign.annotation);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -6082,26 +6019,24 @@ check_ann_subscr(struct compiler *c, expr_ty e)
/* We check that everything in a subscript is defined at runtime. */
switch (e->kind) {
case Slice_kind:
- if (e->v.Slice.lower && !check_ann_expr(c, e->v.Slice.lower)) {
- return 0;
+ if (e->v.Slice.lower && check_ann_expr(c, e->v.Slice.lower) < 0) {
+ return ERROR;
}
- if (e->v.Slice.upper && !check_ann_expr(c, e->v.Slice.upper)) {
- return 0;
+ if (e->v.Slice.upper && check_ann_expr(c, e->v.Slice.upper) < 0) {
+ return ERROR;
}
- if (e->v.Slice.step && !check_ann_expr(c, e->v.Slice.step)) {
- return 0;
+ if (e->v.Slice.step && check_ann_expr(c, e->v.Slice.step) < 0) {
+ return ERROR;
}
- return 1;
+ return SUCCESS;
case Tuple_kind: {
/* extended slice */
asdl_expr_seq *elts = e->v.Tuple.elts;
Py_ssize_t i, n = asdl_seq_LEN(elts);
for (i = 0; i < n; i++) {
- if (!check_ann_subscr(c, asdl_seq_GET(elts, i))) {
- return 0;
- }
+ RETURN_IF_ERROR(check_ann_subscr(c, asdl_seq_GET(elts, i)));
}
- return 1;
+ return SUCCESS;
}
default:
return check_ann_expr(c, e);
@@ -6124,8 +6059,9 @@ compiler_annassign(struct compiler *c, stmt_ty s)
}
switch (targ->kind) {
case Name_kind:
- if (forbidden_name(c, loc, targ->v.Name.id, Store))
- return 0;
+ if (forbidden_name(c, loc, targ->v.Name.id, Store)) {
+ return ERROR;
+ }
/* If we have a simple name in a module or class, store annotation. */
if (s->v.AnnAssign.simple &&
(c->u->u_scope_type == COMPILER_SCOPE_MODULE ||
@@ -6143,31 +6079,32 @@ compiler_annassign(struct compiler *c, stmt_ty s)
}
break;
case Attribute_kind:
- if (forbidden_name(c, loc, targ->v.Attribute.attr, Store))
- return 0;
+ if (forbidden_name(c, loc, targ->v.Attribute.attr, Store)) {
+ return ERROR;
+ }
if (!s->v.AnnAssign.value &&
- !check_ann_expr(c, targ->v.Attribute.value)) {
- return 0;
+ check_ann_expr(c, targ->v.Attribute.value) < 0) {
+ return ERROR;
}
break;
case Subscript_kind:
if (!s->v.AnnAssign.value &&
- (!check_ann_expr(c, targ->v.Subscript.value) ||
- !check_ann_subscr(c, targ->v.Subscript.slice))) {
- return 0;
+ (check_ann_expr(c, targ->v.Subscript.value) < 0 ||
+ check_ann_subscr(c, targ->v.Subscript.slice) < 0)) {
+ return ERROR;
}
break;
default:
PyErr_Format(PyExc_SystemError,
"invalid node type (%d) for annotated assignment",
targ->kind);
- return 0;
+ return ERROR;
}
/* Annotation is evaluated last. */
- if (!s->v.AnnAssign.simple && !check_annotation(c, s)) {
- return 0;
+ if (!s->v.AnnAssign.simple && check_annotation(c, s) < 0) {
+ return ERROR;
}
- return 1;
+ return SUCCESS;
}
/* Raises a SyntaxError and returns 0.
@@ -6183,7 +6120,7 @@ compiler_error(struct compiler *c, location loc,
PyObject *msg = PyUnicode_FromFormatV(format, vargs);
va_end(vargs);
if (msg == NULL) {
- return 0;
+ return ERROR;
}
PyObject *loc_obj = PyErr_ProgramTextObject(c->c_filename, loc.lineno);
if (loc_obj == NULL) {
@@ -6200,7 +6137,7 @@ compiler_error(struct compiler *c, location loc,
exit:
Py_DECREF(loc_obj);
Py_XDECREF(args);
- return 0;
+ return ERROR;
}
/* Emits a SyntaxWarning and returns 1 on success.
@@ -6216,7 +6153,7 @@ compiler_warn(struct compiler *c, location loc,
PyObject *msg = PyUnicode_FromFormatV(format, vargs);
va_end(vargs);
if (msg == NULL) {
- return 0;
+ return ERROR;
}
if (PyErr_WarnExplicitObject(PyExc_SyntaxWarning, msg, c->c_filename,
loc.lineno, NULL, NULL) < 0)
@@ -6229,10 +6166,10 @@ compiler_warn(struct compiler *c, location loc,
compiler_error(c, loc, PyUnicode_AsUTF8(msg));
}
Py_DECREF(msg);
- return 0;
+ return ERROR;
}
Py_DECREF(msg);
- return 1;
+ return SUCCESS;
}
static int
@@ -6243,19 +6180,13 @@ compiler_subscript(struct compiler *c, expr_ty e)
int op = 0;
if (ctx == Load) {
- if (!check_subscripter(c, e->v.Subscript.value)) {
- return 0;
- }
- if (!check_index(c, e->v.Subscript.value, e->v.Subscript.slice)) {
- return 0;
- }
+ RETURN_IF_ERROR(check_subscripter(c, e->v.Subscript.value));
+ RETURN_IF_ERROR(check_index(c, e->v.Subscript.value, e->v.Subscript.slice));
}
VISIT(c, expr, e->v.Subscript.value);
if (is_two_element_slice(e->v.Subscript.slice) && ctx != Del) {
- if (!compiler_slice(c, e->v.Subscript.slice)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_slice(c, e->v.Subscript.slice));
if (ctx == Load) {
ADDOP(c, loc, BINARY_SLICE);
}
@@ -6274,11 +6205,11 @@ compiler_subscript(struct compiler *c, expr_ty e)
assert(op);
ADDOP(c, loc, op);
}
- return 1;
+ return SUCCESS;
}
/* Returns the number of the values emitted,
- * thus are needed to build the slice, or 0 if there is an error. */
+ * which are needed to build the slice, or -1 if there is an error. */
static int
compiler_slice(struct compiler *c, expr_ty s)
{
@@ -6336,20 +6267,20 @@ ensure_fail_pop(struct compiler *c, pattern_context *pc, Py_ssize_t n)
{
Py_ssize_t size = n + 1;
if (size <= pc->fail_pop_size) {
- return 1;
+ return SUCCESS;
}
Py_ssize_t needed = sizeof(jump_target_label) * size;
jump_target_label *resized = PyObject_Realloc(pc->fail_pop, needed);
if (resized == NULL) {
PyErr_NoMemory();
- return 0;
+ return ERROR;
}
pc->fail_pop = resized;
while (pc->fail_pop_size < size) {
NEW_JUMP_TARGET_LABEL(c, new_block);
pc->fail_pop[pc->fail_pop_size++] = new_block;
}
- return 1;
+ return SUCCESS;
}
// Use op to jump to the correct fail_pop block.
@@ -6360,9 +6291,9 @@ jump_to_fail_pop(struct compiler *c, location loc,
// Pop any items on the top of the stack, plus any objects we were going to
// capture on success:
Py_ssize_t pops = pc->on_top + PyList_GET_SIZE(pc->stores);
- RETURN_IF_FALSE(ensure_fail_pop(c, pc, pops));
+ RETURN_IF_ERROR(ensure_fail_pop(c, pc, pops));
ADDOP_JUMP(c, loc, op, pc->fail_pop[pops]);
- return 1;
+ return SUCCESS;
}
// Build all of the fail_pop blocks and reset fail_pop.
@@ -6372,21 +6303,21 @@ emit_and_reset_fail_pop(struct compiler *c, location loc,
{
if (!pc->fail_pop_size) {
assert(pc->fail_pop == NULL);
- return 1;
+ return SUCCESS;
}
while (--pc->fail_pop_size) {
USE_LABEL(c, pc->fail_pop[pc->fail_pop_size]);
- if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, loc)) {
+ if (cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, loc) < 0) {
pc->fail_pop_size = 0;
PyObject_Free(pc->fail_pop);
pc->fail_pop = NULL;
- return 0;
+ return ERROR;
}
}
USE_LABEL(c, pc->fail_pop[0]);
PyObject_Free(pc->fail_pop);
pc->fail_pop = NULL;
- return 1;
+ return SUCCESS;
}
static int
@@ -6403,7 +6334,7 @@ pattern_helper_rotate(struct compiler *c, location loc, Py_ssize_t count)
while (1 < count) {
ADDOP_I(c, loc, SWAP, count--);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -6412,23 +6343,22 @@ pattern_helper_store_name(struct compiler *c, location loc,
{
if (n == NULL) {
ADDOP(c, loc, POP_TOP);
- return 1;
+ return SUCCESS;
}
if (forbidden_name(c, loc, n, Store)) {
- return 0;
+ return ERROR;
}
// Can't assign to the same name twice:
int duplicate = PySequence_Contains(pc->stores, n);
- if (duplicate < 0) {
- return 0;
- }
+ RETURN_IF_ERROR(duplicate);
if (duplicate) {
return compiler_error_duplicate_store(c, loc, n);
}
// Rotate this object underneath any items we need to preserve:
Py_ssize_t rotations = pc->on_top + PyList_GET_SIZE(pc->stores) + 1;
- RETURN_IF_FALSE(pattern_helper_rotate(c, loc, rotations));
- return !PyList_Append(pc->stores, n);
+ RETURN_IF_ERROR(pattern_helper_rotate(c, loc, rotations));
+ RETURN_IF_ERROR(PyList_Append(pc->stores, n));
+ return SUCCESS;
}
@@ -6442,10 +6372,11 @@ pattern_unpack_helper(struct compiler *c, location loc,
pattern_ty elt = asdl_seq_GET(elts, i);
if (elt->kind == MatchStar_kind && !seen_star) {
if ((i >= (1 << 8)) ||
- (n-i-1 >= (INT_MAX >> 8)))
+ (n-i-1 >= (INT_MAX >> 8))) {
return compiler_error(c, loc,
"too many expressions in "
"star-unpacking sequence pattern");
+ }
ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8)));
seen_star = 1;
}
@@ -6457,7 +6388,7 @@ pattern_unpack_helper(struct compiler *c, location loc,
if (!seen_star) {
ADDOP_I(c, loc, UNPACK_SEQUENCE, n);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -6465,7 +6396,7 @@ pattern_helper_sequence_unpack(struct compiler *c, location loc,
asdl_pattern_seq *patterns, Py_ssize_t star,
pattern_context *pc)
{
- RETURN_IF_FALSE(pattern_unpack_helper(c, loc, patterns));
+ RETURN_IF_ERROR(pattern_unpack_helper(c, loc, patterns));
Py_ssize_t size = asdl_seq_LEN(patterns);
// We've now got a bunch of new subjects on the stack. They need to remain
// there after each subpattern match:
@@ -6474,9 +6405,9 @@ pattern_helper_sequence_unpack(struct compiler *c, location loc,
// One less item to keep track of each time we loop through:
pc->on_top--;
pattern_ty pattern = asdl_seq_GET(patterns, i);
- RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
}
- return 1;
+ return SUCCESS;
}
// Like pattern_helper_sequence_unpack, but uses BINARY_SUBSCR instead of
@@ -6511,12 +6442,12 @@ pattern_helper_sequence_subscr(struct compiler *c, location loc,
ADDOP_BINARY(c, loc, Sub);
}
ADDOP(c, loc, BINARY_SUBSCR);
- RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
}
// Pop the subject, we're done with it:
pc->on_top--;
ADDOP(c, loc, POP_TOP);
- return 1;
+ return SUCCESS;
}
// Like compiler_pattern, but turn off checks for irrefutability.
@@ -6526,9 +6457,9 @@ compiler_pattern_subpattern(struct compiler *c,
{
int allow_irrefutable = pc->allow_irrefutable;
pc->allow_irrefutable = 1;
- RETURN_IF_FALSE(compiler_pattern(c, p, pc));
+ RETURN_IF_ERROR(compiler_pattern(c, p, pc));
pc->allow_irrefutable = allow_irrefutable;
- return 1;
+ return SUCCESS;
}
static int
@@ -6550,19 +6481,20 @@ compiler_pattern_as(struct compiler *c, pattern_ty p, pattern_context *pc)
// Need to make a copy for (possibly) storing later:
pc->on_top++;
ADDOP_I(c, LOC(p), COPY, 1);
- RETURN_IF_FALSE(compiler_pattern(c, p->v.MatchAs.pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern(c, p->v.MatchAs.pattern, pc));
// Success! Store it:
pc->on_top--;
- RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc));
- return 1;
+ RETURN_IF_ERROR(pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc));
+ return SUCCESS;
}
static int
compiler_pattern_star(struct compiler *c, pattern_ty p, pattern_context *pc)
{
assert(p->kind == MatchStar_kind);
- RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), p->v.MatchStar.name, pc));
- return 1;
+ RETURN_IF_ERROR(
+ pattern_helper_store_name(c, LOC(p), p->v.MatchStar.name, pc));
+ return SUCCESS;
}
static int
@@ -6575,18 +6507,18 @@ validate_kwd_attrs(struct compiler *c, asdl_identifier_seq *attrs, asdl_pattern_
identifier attr = ((identifier)asdl_seq_GET(attrs, i));
location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i));
if (forbidden_name(c, loc, attr, Store)) {
- return -1;
+ return ERROR;
}
for (Py_ssize_t j = i + 1; j < nattrs; j++) {
identifier other = ((identifier)asdl_seq_GET(attrs, j));
if (!PyUnicode_Compare(attr, other)) {
location loc = LOC((pattern_ty) asdl_seq_GET(patterns, j));
compiler_error(c, loc, "attribute name repeated in class pattern: %U", attr);
- return -1;
+ return ERROR;
}
}
}
- return 0;
+ return SUCCESS;
}
static int
@@ -6610,11 +6542,13 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc)
return compiler_error(c, LOC(p), e, p->v.MatchClass.cls);
}
if (nattrs) {
- RETURN_IF_FALSE(!validate_kwd_attrs(c, kwd_attrs, kwd_patterns));
+ RETURN_IF_ERROR(validate_kwd_attrs(c, kwd_attrs, kwd_patterns));
}
VISIT(c, expr, p->v.MatchClass.cls);
- PyObject *attr_names;
- RETURN_IF_FALSE(attr_names = PyTuple_New(nattrs));
+ PyObject *attr_names = PyTuple_New(nattrs);
+ if (attr_names == NULL) {
+ return ERROR;
+ }
Py_ssize_t i;
for (i = 0; i < nattrs; i++) {
PyObject *name = asdl_seq_GET(kwd_attrs, i);
@@ -6627,7 +6561,7 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc)
ADDOP_I(c, LOC(p), IS_OP, 1);
// TOS is now a tuple of (nargs + nattrs) attributes (or None):
pc->on_top++;
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, nargs + nattrs);
pc->on_top += nargs + nattrs - 1;
for (i = 0; i < nargs + nattrs; i++) {
@@ -6645,10 +6579,10 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc)
ADDOP(c, LOC(p), POP_TOP);
continue;
}
- RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
}
// Success! Pop the tuple of attributes:
- return 1;
+ return SUCCESS;
}
static int
@@ -6671,19 +6605,19 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
// We need to keep the subject on top during the mapping and length checks:
pc->on_top++;
ADDOP(c, LOC(p), MATCH_MAPPING);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
if (!size && !star_target) {
// If the pattern is just "{}", we're done! Pop the subject:
pc->on_top--;
ADDOP(c, LOC(p), POP_TOP);
- return 1;
+ return SUCCESS;
}
if (size) {
// If the pattern has any keys in it, perform a length check:
ADDOP(c, LOC(p), GET_LEN);
ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size));
ADDOP_COMPARE(c, LOC(p), GtE);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
}
if (INT_MAX < size - 1) {
return compiler_error(c, LOC(p), "too many sub-patterns in mapping pattern");
@@ -6695,7 +6629,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
// SyntaxError in the case of duplicates.
PyObject *seen = PySet_New(NULL);
if (seen == NULL) {
- return 0;
+ return ERROR;
}
// NOTE: goto error on failure in the loop below to avoid leaking `seen`
@@ -6729,7 +6663,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
compiler_error(c, LOC(p), e);
goto error;
}
- if (!compiler_visit_expr(c, key)) {
+ if (compiler_visit_expr(c, key) < 0) {
goto error;
}
}
@@ -6744,7 +6678,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
ADDOP_I(c, LOC(p), COPY, 1);
ADDOP_LOAD_CONST(c, LOC(p), Py_None);
ADDOP_I(c, LOC(p), IS_OP, 1);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
// So far so good. Use that tuple of values on the stack to match
// sub-patterns against:
ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, size);
@@ -6752,7 +6686,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
for (Py_ssize_t i = 0; i < size; i++) {
pc->on_top--;
pattern_ty pattern = asdl_seq_GET(patterns, i);
- RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
}
// If we get this far, it's a match! Whatever happens next should consume
// the tuple of keys and the subject:
@@ -6773,17 +6707,17 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
ADDOP_I(c, LOC(p), SWAP, 2); // [copy, keys..., copy, key]
ADDOP(c, LOC(p), DELETE_SUBSCR); // [copy, keys...]
}
- RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), star_target, pc));
+ RETURN_IF_ERROR(pattern_helper_store_name(c, LOC(p), star_target, pc));
}
else {
ADDOP(c, LOC(p), POP_TOP); // Tuple of keys.
ADDOP(c, LOC(p), POP_TOP); // Subject.
}
- return 1;
+ return SUCCESS;
error:
Py_DECREF(seen);
- return 0;
+ return ERROR;
}
static int
@@ -6813,8 +6747,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
pc->fail_pop = NULL;
pc->fail_pop_size = 0;
pc->on_top = 0;
- if (!cfg_builder_addop_i(CFG_BUILDER(c), COPY, 1, LOC(alt)) ||
- !compiler_pattern(c, alt, pc)) {
+ if (cfg_builder_addop_i(CFG_BUILDER(c), COPY, 1, LOC(alt)) < 0 ||
+ compiler_pattern(c, alt, pc) < 0) {
goto error;
}
// Success!
@@ -6868,7 +6802,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
// Do the same thing to the stack, using several
// rotations:
while (rotations--) {
- if (!pattern_helper_rotate(c, LOC(alt), icontrol + 1)){
+ if (pattern_helper_rotate(c, LOC(alt), icontrol + 1) < 0) {
goto error;
}
}
@@ -6876,8 +6810,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
}
}
assert(control);
- if (!cfg_builder_addop_j(CFG_BUILDER(c), LOC(alt), JUMP, end) ||
- !emit_and_reset_fail_pop(c, LOC(alt), pc))
+ if (cfg_builder_addop_j(CFG_BUILDER(c), LOC(alt), JUMP, end) < 0 ||
+ emit_and_reset_fail_pop(c, LOC(alt), pc) < 0)
{
goto error;
}
@@ -6888,8 +6822,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
// Need to NULL this for the PyObject_Free call in the error block.
old_pc.fail_pop = NULL;
// No match. Pop the remaining copy of the subject and fail:
- if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, LOC(p)) ||
- !jump_to_fail_pop(c, LOC(p), pc, JUMP)) {
+ if (cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, LOC(p)) < 0 ||
+ jump_to_fail_pop(c, LOC(p), pc, JUMP) < 0) {
goto error;
}
@@ -6904,7 +6838,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
Py_ssize_t nrots = nstores + 1 + pc->on_top + PyList_GET_SIZE(pc->stores);
for (Py_ssize_t i = 0; i < nstores; i++) {
// Rotate this capture to its proper place on the stack:
- if (!pattern_helper_rotate(c, LOC(p), nrots)) {
+ if (pattern_helper_rotate(c, LOC(p), nrots) < 0) {
goto error;
}
// Update the list of previous stores with this new name, checking for
@@ -6927,14 +6861,14 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
// NOTE: Returning macros are safe again.
// Pop the copy of the subject:
ADDOP(c, LOC(p), POP_TOP);
- return 1;
+ return SUCCESS;
diff:
compiler_error(c, LOC(p), "alternative patterns bind different names");
error:
PyObject_Free(old_pc.fail_pop);
Py_DECREF(old_pc.stores);
Py_XDECREF(control);
- return 0;
+ return ERROR;
}
@@ -6966,20 +6900,20 @@ compiler_pattern_sequence(struct compiler *c, pattern_ty p,
// We need to keep the subject on top during the sequence and length checks:
pc->on_top++;
ADDOP(c, LOC(p), MATCH_SEQUENCE);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
if (star < 0) {
// No star: len(subject) == size
ADDOP(c, LOC(p), GET_LEN);
ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size));
ADDOP_COMPARE(c, LOC(p), Eq);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
}
else if (size > 1) {
// Star: len(subject) >= size - 1
ADDOP(c, LOC(p), GET_LEN);
ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size - 1));
ADDOP_COMPARE(c, LOC(p), GtE);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
}
// Whatever comes next should consume the subject:
pc->on_top--;
@@ -6988,12 +6922,12 @@ compiler_pattern_sequence(struct compiler *c, pattern_ty p,
ADDOP(c, LOC(p), POP_TOP);
}
else if (star_wildcard) {
- RETURN_IF_FALSE(pattern_helper_sequence_subscr(c, LOC(p), patterns, star, pc));
+ RETURN_IF_ERROR(pattern_helper_sequence_subscr(c, LOC(p), patterns, star, pc));
}
else {
- RETURN_IF_FALSE(pattern_helper_sequence_unpack(c, LOC(p), patterns, star, pc));
+ RETURN_IF_ERROR(pattern_helper_sequence_unpack(c, LOC(p), patterns, star, pc));
}
- return 1;
+ return SUCCESS;
}
static int
@@ -7007,8 +6941,8 @@ compiler_pattern_value(struct compiler *c, pattern_ty p, pattern_context *pc)
}
VISIT(c, expr, value);
ADDOP_COMPARE(c, LOC(p), Eq);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
- return 1;
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ return SUCCESS;
}
static int
@@ -7017,8 +6951,8 @@ compiler_pattern_singleton(struct compiler *c, pattern_ty p, pattern_context *pc
assert(p->kind == MatchSingleton_kind);
ADDOP_LOAD_CONST(c, LOC(p), p->v.MatchSingleton.value);
ADDOP_COMPARE(c, LOC(p), Is);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
- return 1;
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ return SUCCESS;
}
static int
@@ -7063,32 +6997,35 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
if (i != cases - has_default - 1) {
ADDOP_I(c, LOC(m->pattern), COPY, 1);
}
- RETURN_IF_FALSE(pc->stores = PyList_New(0));
+ pc->stores = PyList_New(0);
+ if (pc->stores == NULL) {
+ return ERROR;
+ }
// Irrefutable cases must be either guarded, last, or both:
pc->allow_irrefutable = m->guard != NULL || i == cases - 1;
pc->fail_pop = NULL;
pc->fail_pop_size = 0;
pc->on_top = 0;
// NOTE: Can't use returning macros here (they'll leak pc->stores)!
- if (!compiler_pattern(c, m->pattern, pc)) {
+ if (compiler_pattern(c, m->pattern, pc) < 0) {
Py_DECREF(pc->stores);
- return 0;
+ return ERROR;
}
assert(!pc->on_top);
// It's a match! Store all of the captured names (they're on the stack).
Py_ssize_t nstores = PyList_GET_SIZE(pc->stores);
for (Py_ssize_t n = 0; n < nstores; n++) {
PyObject *name = PyList_GET_ITEM(pc->stores, n);
- if (!compiler_nameop(c, LOC(m->pattern), name, Store)) {
+ if (compiler_nameop(c, LOC(m->pattern), name, Store) < 0) {
Py_DECREF(pc->stores);
- return 0;
+ return ERROR;
}
}
Py_DECREF(pc->stores);
// NOTE: Returning macros are safe again.
if (m->guard) {
- RETURN_IF_FALSE(ensure_fail_pop(c, pc, 0));
- RETURN_IF_FALSE(compiler_jump_if(c, LOC(m->pattern), m->guard, pc->fail_pop[0], 0));
+ RETURN_IF_ERROR(ensure_fail_pop(c, pc, 0));
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(m->pattern), m->guard, pc->fail_pop[0], 0));
}
// Success! Pop the subject off, we're done with it:
if (i != cases - has_default - 1) {
@@ -7099,7 +7036,7 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
// If the pattern fails to match, we want the line number of the
// cleanup to be associated with the failed pattern, not the last line
// of the body
- RETURN_IF_FALSE(emit_and_reset_fail_pop(c, LOC(m->pattern), pc));
+ RETURN_IF_ERROR(emit_and_reset_fail_pop(c, LOC(m->pattern), pc));
}
if (has_default) {
// A trailing "case _" is common, and lets us save a bit of redundant
@@ -7114,12 +7051,12 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
ADDOP(c, LOC(m->pattern), NOP);
}
if (m->guard) {
- RETURN_IF_FALSE(compiler_jump_if(c, LOC(m->pattern), m->guard, end, 0));
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(m->pattern), m->guard, end, 0));
}
VISIT_SEQ(c, stmt, m->body);
}
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -7268,12 +7205,12 @@ assemble_init(struct assembler *a, int firstlineno)
if (a->a_except_table == NULL) {
goto error;
}
- return 1;
+ return 0;
error:
Py_XDECREF(a->a_bytecode);
Py_XDECREF(a->a_linetable);
Py_XDECREF(a->a_except_table);
- return 0;
+ return -1;
}
static void
@@ -7667,8 +7604,9 @@ assemble_emit_exception_table_entry(struct assembler *a, int start, int end, bas
{
Py_ssize_t len = PyBytes_GET_SIZE(a->a_except_table);
if (a->a_except_table_off + MAX_SIZE_OF_ENTRY >= len) {
- if (_PyBytes_Resize(&a->a_except_table, len * 2) < 0)
- return 0;
+ if (_PyBytes_Resize(&a->a_except_table, len * 2) < 0) {
+ return -1;
+ }
}
int size = end-start;
assert(end > start);
@@ -7683,7 +7621,7 @@ assemble_emit_exception_table_entry(struct assembler *a, int start, int end, bas
assemble_emit_exception_table_item(a, size, 0);
assemble_emit_exception_table_item(a, target, 0);
assemble_emit_exception_table_item(a, depth_lasti, 0);
- return 1;
+ return 0;
}
static int
@@ -7699,7 +7637,9 @@ assemble_exception_table(struct assembler *a, basicblock *entryblock)
struct instr *instr = &b->b_instr[i];
if (instr->i_except != handler) {
if (handler != NULL) {
- RETURN_IF_FALSE(assemble_emit_exception_table_entry(a, start, ioffset, handler));
+ if (assemble_emit_exception_table_entry(a, start, ioffset, handler) < 0) {
+ return -1;
+ }
}
start = ioffset;
handler = instr->i_except;
@@ -7708,9 +7648,11 @@ assemble_exception_table(struct assembler *a, basicblock *entryblock)
}
}
if (handler != NULL) {
- RETURN_IF_FALSE(assemble_emit_exception_table_entry(a, start, ioffset, handler));
+ if (assemble_emit_exception_table_entry(a, start, ioffset, handler) < 0) {
+ return -1;
+ }
}
- return 1;
+ return 0;
}
/* Code location emitting code. See locations.md for a description of the format. */
@@ -7813,12 +7755,12 @@ write_location_info_entry(struct assembler* a, struct instr* i, int isize)
if (a->a_location_off + THEORETICAL_MAX_ENTRY_SIZE >= len) {
assert(len > THEORETICAL_MAX_ENTRY_SIZE);
if (_PyBytes_Resize(&a->a_linetable, len*2) < 0) {
- return 0;
+ return -1;
}
}
if (i->i_loc.lineno < 0) {
write_location_info_none(a, isize);
- return 1;
+ return 0;
}
int line_delta = i->i_loc.lineno - a->a_lineno;
int column = i->i_loc.col_offset;
@@ -7829,23 +7771,23 @@ write_location_info_entry(struct assembler* a, struct instr* i, int isize)
if (i->i_loc.end_lineno == i->i_loc.lineno || i->i_loc.end_lineno == -1) {
write_location_info_no_column(a, isize, line_delta);
a->a_lineno = i->i_loc.lineno;
- return 1;
+ return 0;
}
}
else if (i->i_loc.end_lineno == i->i_loc.lineno) {
if (line_delta == 0 && column < 80 && end_column - column < 16 && end_column >= column) {
write_location_info_short_form(a, isize, column, end_column);
- return 1;
+ return 0;
}
if (line_delta >= 0 && line_delta < 3 && column < 128 && end_column < 128) {
write_location_info_oneline_form(a, isize, line_delta, column, end_column);
a->a_lineno = i->i_loc.lineno;
- return 1;
+ return 0;
}
}
write_location_info_long_form(a, i, isize);
a->a_lineno = i->i_loc.lineno;
- return 1;
+ return 0;
}
static int
@@ -7853,8 +7795,8 @@ assemble_emit_location(struct assembler* a, struct instr* i)
{
int isize = instr_size(i);
while (isize > 8) {
- if (!write_location_info_entry(a, i, 8)) {
- return 0;
+ if (write_location_info_entry(a, i, 8) < 0) {
+ return -1;
}
isize -= 8;
}
@@ -7874,15 +7816,17 @@ assemble_emit(struct assembler *a, struct instr *i)
int size = instr_size(i);
if (a->a_offset + size >= len / (int)sizeof(_Py_CODEUNIT)) {
- if (len > PY_SSIZE_T_MAX / 2)
- return 0;
- if (_PyBytes_Resize(&a->a_bytecode, len * 2) < 0)
- return 0;
+ if (len > PY_SSIZE_T_MAX / 2) {
+ return -1;
+ }
+ if (_PyBytes_Resize(&a->a_bytecode, len * 2) < 0) {
+ return -1;
+ }
}
code = (_Py_CODEUNIT *)PyBytes_AS_STRING(a->a_bytecode) + a->a_offset;
a->a_offset += size;
write_instr(code, i, size);
- return 1;
+ return 0;
}
static int
@@ -8282,17 +8226,17 @@ merge_const_one(PyObject *const_cache, PyObject **obj)
PyDict_CheckExact(const_cache);
PyObject *key = _PyCode_ConstantKey(*obj);
if (key == NULL) {
- return 0;
+ return -1;
}
// t is borrowed reference
PyObject *t = PyDict_SetDefault(const_cache, key, key);
Py_DECREF(key);
if (t == NULL) {
- return 0;
+ return -1;
}
if (t == key) { // obj is new constant.
- return 1;
+ return 0;
}
if (PyTuple_CheckExact(t)) {
@@ -8301,7 +8245,7 @@ merge_const_one(PyObject *const_cache, PyObject **obj)
}
Py_SETREF(*obj, Py_NewRef(t));
- return 1;
+ return 0;
}
// This is in codeobject.c.
@@ -8367,7 +8311,7 @@ makecode(struct compiler *c, struct assembler *a, PyObject *constslist,
if (!names) {
goto error;
}
- if (!merge_const_one(c->c_const_cache, &names)) {
+ if (merge_const_one(c->c_const_cache, &names) < 0) {
goto error;
}
@@ -8375,7 +8319,7 @@ makecode(struct compiler *c, struct assembler *a, PyObject *constslist,
if (consts == NULL) {
goto error;
}
- if (!merge_const_one(c->c_const_cache, &consts)) {
+ if (merge_const_one(c->c_const_cache, &consts) < 0) {
goto error;
}
@@ -8426,7 +8370,7 @@ makecode(struct compiler *c, struct assembler *a, PyObject *constslist,
goto error;
}
- if (!merge_const_one(c->c_const_cache, &localsplusnames)) {
+ if (merge_const_one(c->c_const_cache, &localsplusnames) < 0) {
goto error;
}
con.localsplusnames = localsplusnames;
@@ -8785,6 +8729,19 @@ prepare_localsplus(struct compiler* c, int code_flags)
return nlocalsplus;
}
+static int
+add_return_at_end_of_block(struct compiler *c, int addNone)
+{
+ /* Make sure every block that falls off the end returns None. */
+ if (!basicblock_returns(CFG_BUILDER(c)->g_curblock)) {
+ if (addNone) {
+ ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
+ }
+ ADDOP(c, NO_LOCATION, RETURN_VALUE);
+ }
+ return SUCCESS;
+}
+
static PyCodeObject *
assemble(struct compiler *c, int addNone)
{
@@ -8798,12 +8755,8 @@ assemble(struct compiler *c, int addNone)
return NULL;
}
- /* Make sure every block that falls off the end returns None. */
- if (!basicblock_returns(CFG_BUILDER(c)->g_curblock)) {
- if (addNone) {
- ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- }
- ADDOP(c, NO_LOCATION, RETURN_VALUE);
+ if (add_return_at_end_of_block(c, addNone) < 0) {
+ return NULL;
}
int nblocks = 0;
@@ -8892,45 +8845,50 @@ assemble(struct compiler *c, int addNone)
assemble_jump_offsets(g->g_entryblock);
/* Create assembler */
- if (!assemble_init(&a, c->u->u_firstlineno))
+ if (assemble_init(&a, c->u->u_firstlineno) < 0) {
goto error;
+ }
/* Emit code. */
for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
- for (int j = 0; j < b->b_iused; j++)
- if (!assemble_emit(&a, &b->b_instr[j]))
+ for (int j = 0; j < b->b_iused; j++) {
+ if (assemble_emit(&a, &b->b_instr[j]) < 0) {
goto error;
+ }
+ }
}
/* Emit location info */
a.a_lineno = c->u->u_firstlineno;
for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) {
- for (int j = 0; j < b->b_iused; j++)
- if (!assemble_emit_location(&a, &b->b_instr[j]))
+ for (int j = 0; j < b->b_iused; j++) {
+ if (assemble_emit_location(&a, &b->b_instr[j]) < 0) {
goto error;
+ }
+ }
}
- if (!assemble_exception_table(&a, g->g_entryblock)) {
+ if (assemble_exception_table(&a, g->g_entryblock) < 0) {
goto error;
}
if (_PyBytes_Resize(&a.a_except_table, a.a_except_table_off) < 0) {
goto error;
}
- if (!merge_const_one(c->c_const_cache, &a.a_except_table)) {
+ if (merge_const_one(c->c_const_cache, &a.a_except_table) < 0) {
goto error;
}
if (_PyBytes_Resize(&a.a_linetable, a.a_location_off) < 0) {
goto error;
}
- if (!merge_const_one(c->c_const_cache, &a.a_linetable)) {
+ if (merge_const_one(c->c_const_cache, &a.a_linetable) < 0) {
goto error;
}
if (_PyBytes_Resize(&a.a_bytecode, a.a_offset * sizeof(_Py_CODEUNIT)) < 0) {
goto error;
}
- if (!merge_const_one(c->c_const_cache, &a.a_bytecode)) {
+ if (merge_const_one(c->c_const_cache, &a.a_bytecode) < 0) {
goto error;
}
@@ -8995,7 +8953,7 @@ fold_tuple_on_constants(PyObject *const_cache,
}
PyTuple_SET_ITEM(newconst, i, constant);
}
- if (merge_const_one(const_cache, &newconst) == 0) {
+ if (merge_const_one(const_cache, &newconst) < 0) {
Py_DECREF(newconst);
return -1;
}
@@ -9849,17 +9807,17 @@ remove_unused_consts(basicblock *entryblock, PyObject *consts)
return err;
}
-static inline int
+static inline bool
is_exit_without_lineno(basicblock *b) {
if (!basicblock_exits_scope(b)) {
- return 0;
+ return false;
}
for (int i = 0; i < b->b_iused; i++) {
if (b->b_instr[i].i_loc.lineno >= 0) {
- return 0;
+ return false;
}
}
- return 1;
+ return true;
}
/* PEP 626 mandates that the f_lineno of a frame is correct
@@ -9980,7 +9938,7 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g)
if (PyErr_Occurred()) {
return -1;
}
- if (!cfg_builder_addop(g, opcode, oparg, loc)) {
+ if (cfg_builder_addop(g, opcode, oparg, loc) < 0) {
return -1;
}
}
@@ -10065,7 +10023,7 @@ _PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags,
return NULL;
}
- if (!compiler_codegen(c, mod)) {
+ if (compiler_codegen(c, mod) < 0) {
goto finally;
}
diff --git a/Python/dtoa.c b/Python/dtoa.c
index 1b47d83bf77a24..cff5f1b0658eae 100644
--- a/Python/dtoa.c
+++ b/Python/dtoa.c
@@ -673,10 +673,6 @@ mult(Bigint *a, Bigint *b)
#ifndef Py_USING_MEMORY_DEBUGGER
-/* p5s is a linked list of powers of 5 of the form 5**(2**i), i >= 2 */
-
-static Bigint *p5s;
-
/* multiply the Bigint b by 5**k. Returns a pointer to the result, or NULL on
failure; if the returned pointer is distinct from b then the original
Bigint b will have been Bfree'd. Ignores the sign of b. */
@@ -696,7 +692,7 @@ pow5mult(Bigint *b, int k)
if (!(k >>= 2))
return b;
- p5 = p5s;
+ p5 = _PyRuntime.dtoa.p5s;
if (!p5) {
/* first time */
p5 = i2b(625);
@@ -704,7 +700,7 @@ pow5mult(Bigint *b, int k)
Bfree(b);
return NULL;
}
- p5s = p5;
+ _PyRuntime.dtoa.p5s = p5;
p5->next = 0;
}
for(;;) {
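The dtoa.c hunks above drop the file-static p5s cache (the lazily built list of 5**(2**i) Bigints used by pow5mult) and fetch it from per-runtime state as _PyRuntime.dtoa.p5s, presumably so the cache is owned by the runtime rather than living in hidden process-global storage. A hedged sketch of the assumed shape of that state follows; apart from the p5s field seen in the hunk, the struct and field names are illustrative, and the real definition lives in the internal pycore headers, which are not shown in this diff.

    /* Illustrative only: assumed layout of the runtime-held dtoa state.
     * Bigint is dtoa.c's arbitrary-precision integer type. */
    struct _dtoa_runtime_state {
        Bigint *p5s;   /* cache of 5**(2**i) powers, i >= 2, built lazily by pow5mult() */
        /* ... other dtoa working state ... */
    };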
diff --git a/Python/frame.c b/Python/frame.c
index 52f6ef428291c5..b1525cca511224 100644
--- a/Python/frame.c
+++ b/Python/frame.c
@@ -127,6 +127,9 @@ _PyFrame_Clear(_PyInterpreterFrame *frame)
* to have cleared the enclosing generator, if any. */
assert(frame->owner != FRAME_OWNED_BY_GENERATOR ||
_PyFrame_GetGenerator(frame)->gi_frame_state == FRAME_CLEARED);
+ // GH-99729: Clearing this frame can expose the stack (via finalizers). It's
+ // crucial that this frame has been unlinked, and is no longer visible:
+ assert(_PyThreadState_GET()->cframe->current_frame != frame);
if (frame->frame_obj) {
PyFrameObject *f = frame->frame_obj;
frame->frame_obj = NULL;
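The new assertion in _PyFrame_Clear records an ordering requirement: a frame must already be unlinked from the thread's frame chain before it is cleared, because clearing can run arbitrary finalizers that would otherwise be able to observe a half-dead frame. Below is a minimal sketch of the call order the assertion enforces; it is not a real call site, and the previous link field is an assumption beyond what the hunk itself shows.

    /* Sketch only: unlink the frame, then clear it. Assumes CPython's internal
     * headers; `previous` is assumed to be the back link to the caller's frame. */
    #include "pycore_frame.h"   /* _PyInterpreterFrame, _PyFrame_Clear */

    static void
    pop_and_clear_frame(PyThreadState *tstate, _PyInterpreterFrame *frame)
    {
        assert(tstate->cframe->current_frame == frame);
        tstate->cframe->current_frame = frame->previous;   /* unlink first */
        _PyFrame_Clear(frame);   /* finalizers may run here, but can no longer see the frame */
    }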
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 3af60b83d84e70..63635fbfc2f4cb 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -63,6 +63,115 @@
DISPATCH();
}
+ TARGET(LOAD_FAST__LOAD_FAST) {
+ PyObject *_tmp_1;
+ PyObject *_tmp_2;
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_2 = value;
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_1 = value;
+ }
+ STACK_GROW(2);
+ POKE(1, _tmp_1);
+ POKE(2, _tmp_2);
+ DISPATCH();
+ }
+
+ TARGET(LOAD_FAST__LOAD_CONST) {
+ PyObject *_tmp_1;
+ PyObject *_tmp_2;
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_2 = value;
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value;
+ value = GETITEM(consts, oparg);
+ Py_INCREF(value);
+ _tmp_1 = value;
+ }
+ STACK_GROW(2);
+ POKE(1, _tmp_1);
+ POKE(2, _tmp_2);
+ DISPATCH();
+ }
+
+ TARGET(STORE_FAST__LOAD_FAST) {
+ PyObject *_tmp_1 = PEEK(1);
+ {
+ PyObject *value = _tmp_1;
+ SETLOCAL(oparg, value);
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_1 = value;
+ }
+ POKE(1, _tmp_1);
+ DISPATCH();
+ }
+
+ TARGET(STORE_FAST__STORE_FAST) {
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *value = _tmp_1;
+ SETLOCAL(oparg, value);
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value = _tmp_2;
+ SETLOCAL(oparg, value);
+ }
+ STACK_SHRINK(2);
+ DISPATCH();
+ }
+
+ TARGET(LOAD_CONST__LOAD_FAST) {
+ PyObject *_tmp_1;
+ PyObject *_tmp_2;
+ {
+ PyObject *value;
+ value = GETITEM(consts, oparg);
+ Py_INCREF(value);
+ _tmp_2 = value;
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_1 = value;
+ }
+ STACK_GROW(2);
+ POKE(1, _tmp_1);
+ POKE(2, _tmp_2);
+ DISPATCH();
+ }
+
TARGET(POP_TOP) {
PyObject *value = PEEK(1);
Py_DECREF(value);
@@ -78,6 +187,21 @@
DISPATCH();
}
+ TARGET(END_FOR) {
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *value = _tmp_1;
+ Py_DECREF(value);
+ }
+ {
+ PyObject *value = _tmp_2;
+ Py_DECREF(value);
+ }
+ STACK_SHRINK(2);
+ DISPATCH();
+ }
+
TARGET(UNARY_POSITIVE) {
PyObject *value = PEEK(1);
PyObject *res;
@@ -139,7 +263,7 @@
if (prod == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, prod);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -159,7 +283,7 @@
if (prod == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, prod);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -177,7 +301,7 @@
if (sub == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, sub);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -196,7 +320,7 @@
if (sub == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, sub);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -214,7 +338,7 @@
if (res == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -268,7 +392,7 @@
if (sum == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, sum);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -286,7 +410,7 @@
if (sum == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, sum);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -311,7 +435,7 @@
if (res == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 4;
+ JUMPBY(4);
DISPATCH();
}
@@ -380,7 +504,7 @@
Py_DECREF(list);
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 4;
+ JUMPBY(4);
DISPATCH();
}
@@ -406,7 +530,7 @@
Py_DECREF(tuple);
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 4;
+ JUMPBY(4);
DISPATCH();
}
@@ -424,22 +548,22 @@
}
Py_DECREF(dict);
Py_DECREF(sub);
- if (1) goto pop_2_error;
+ if (true) goto pop_2_error;
}
Py_INCREF(res); // Do this before DECREF'ing dict, sub
Py_DECREF(dict);
Py_DECREF(sub);
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 4;
+ JUMPBY(4);
DISPATCH();
}
TARGET(BINARY_SUBSCR_GETITEM) {
- uint32_t type_version = read_u32(next_instr + 1);
- uint16_t func_version = read_u16(next_instr + 3);
PyObject *sub = PEEK(1);
PyObject *container = PEEK(2);
+ uint32_t type_version = read_u32(&next_instr[1].cache);
+ uint16_t func_version = read_u16(&next_instr[3].cache);
PyTypeObject *tp = Py_TYPE(container);
DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR);
assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE);
@@ -464,22 +588,21 @@
}
TARGET(LIST_APPEND) {
- PyObject *v = POP();
- PyObject *list = PEEK(oparg);
- if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0)
- goto error;
+ PyObject *v = PEEK(1);
+ PyObject *list = PEEK(oparg + 1); // +1 to account for v staying on stack
+ if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error;
+ STACK_SHRINK(1);
PREDICT(JUMP_BACKWARD);
DISPATCH();
}
TARGET(SET_ADD) {
- PyObject *v = POP();
- PyObject *set = PEEK(oparg);
- int err;
- err = PySet_Add(set, v);
+ PyObject *v = PEEK(1);
+ PyObject *set = PEEK(oparg + 1); // +1 to account for v staying on stack
+ int err = PySet_Add(set, v);
Py_DECREF(v);
- if (err != 0)
- goto error;
+ if (err) goto pop_1_error;
+ STACK_SHRINK(1);
PREDICT(JUMP_BACKWARD);
DISPATCH();
}
@@ -489,31 +612,32 @@
PyObject *sub = PEEK(1);
PyObject *container = PEEK(2);
PyObject *v = PEEK(3);
- _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
- if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ uint16_t counter = read_u16(&next_instr[0].cache);
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
+ _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
Py_DECREF(v);
Py_DECREF(container);
Py_DECREF(sub);
- if (err != 0) goto pop_3_error;
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ if (err) goto pop_3_error;
STACK_SHRINK(3);
+ JUMPBY(1);
DISPATCH();
}
TARGET(STORE_SUBSCR_LIST_INT) {
+ PyObject *sub = PEEK(1);
+ PyObject *list = PEEK(2);
+ PyObject *value = PEEK(3);
assert(cframe.use_tracing == 0);
- PyObject *sub = TOP();
- PyObject *list = SECOND();
- PyObject *value = THIRD();
DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR);
@@ -526,61 +650,58 @@
PyObject *old_value = PyList_GET_ITEM(list, index);
PyList_SET_ITEM(list, index, value);
- STACK_SHRINK(3);
assert(old_value != NULL);
Py_DECREF(old_value);
_Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free);
Py_DECREF(list);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ STACK_SHRINK(3);
+ JUMPBY(1);
DISPATCH();
}
TARGET(STORE_SUBSCR_DICT) {
+ PyObject *sub = PEEK(1);
+ PyObject *dict = PEEK(2);
+ PyObject *value = PEEK(3);
assert(cframe.use_tracing == 0);
- PyObject *sub = TOP();
- PyObject *dict = SECOND();
- PyObject *value = THIRD();
DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR);
- STACK_SHRINK(3);
STAT_INC(STORE_SUBSCR, hit);
int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value);
Py_DECREF(dict);
- if (err != 0) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ if (err) goto pop_3_error;
+ STACK_SHRINK(3);
+ JUMPBY(1);
DISPATCH();
}
TARGET(DELETE_SUBSCR) {
- PyObject *sub = TOP();
- PyObject *container = SECOND();
- int err;
- STACK_SHRINK(2);
+ PyObject *sub = PEEK(1);
+ PyObject *container = PEEK(2);
/* del container[sub] */
- err = PyObject_DelItem(container, sub);
+ int err = PyObject_DelItem(container, sub);
Py_DECREF(container);
Py_DECREF(sub);
- if (err != 0)
- goto error;
+ if (err) goto pop_2_error;
+ STACK_SHRINK(2);
DISPATCH();
}
TARGET(PRINT_EXPR) {
- PyObject *value = POP();
+ PyObject *value = PEEK(1);
PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook));
PyObject *res;
+ // Can't use ERROR_IF here.
if (hook == NULL) {
_PyErr_SetString(tstate, PyExc_RuntimeError,
"lost sys.displayhook");
Py_DECREF(value);
- goto error;
+ if (true) goto pop_1_error;
}
res = PyObject_CallOneArg(hook, value);
Py_DECREF(value);
- if (res == NULL)
- goto error;
+ if (res == NULL) goto pop_1_error;
Py_DECREF(res);
+ STACK_SHRINK(1);
DISPATCH();
}
@@ -607,9 +728,10 @@
}
TARGET(INTERPRETER_EXIT) {
+ PyObject *retval = PEEK(1);
assert(frame == &entry_frame);
assert(_PyFrame_IsIncomplete(frame));
- PyObject *retval = POP();
+ STACK_SHRINK(1); // Since we're not going to DISPATCH()
assert(EMPTY());
/* Restore previous cframe and return. */
tstate->cframe = cframe.previous;
@@ -621,59 +743,56 @@
}
TARGET(RETURN_VALUE) {
- PyObject *retval = POP();
+ PyObject *retval = PEEK(1);
+ STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
- frame = cframe.current_frame = pop_frame(tstate, frame);
+ // GH-99729: We need to unlink the frame *before* clearing it:
+ _PyInterpreterFrame *dying = frame;
+ frame = cframe.current_frame = dying->previous;
+ _PyEvalFrameClearAndPop(tstate, dying);
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
TARGET(GET_AITER) {
+ PyObject *obj = PEEK(1);
+ PyObject *iter;
unaryfunc getter = NULL;
- PyObject *iter = NULL;
- PyObject *obj = TOP();
PyTypeObject *type = Py_TYPE(obj);
if (type->tp_as_async != NULL) {
getter = type->tp_as_async->am_aiter;
}
- if (getter != NULL) {
- iter = (*getter)(obj);
- Py_DECREF(obj);
- if (iter == NULL) {
- SET_TOP(NULL);
- goto error;
- }
- }
- else {
- SET_TOP(NULL);
+ if (getter == NULL) {
_PyErr_Format(tstate, PyExc_TypeError,
"'async for' requires an object with "
"__aiter__ method, got %.100s",
type->tp_name);
Py_DECREF(obj);
- goto error;
+ if (true) goto pop_1_error;
}
+ iter = (*getter)(obj);
+ Py_DECREF(obj);
+ if (iter == NULL) goto pop_1_error;
+
if (Py_TYPE(iter)->tp_as_async == NULL ||
Py_TYPE(iter)->tp_as_async->am_anext == NULL) {
- SET_TOP(NULL);
_PyErr_Format(tstate, PyExc_TypeError,
"'async for' received an object from __aiter__ "
"that does not implement __anext__: %.100s",
Py_TYPE(iter)->tp_name);
Py_DECREF(iter);
- goto error;
+ if (true) goto pop_1_error;
}
-
- SET_TOP(iter);
+ POKE(1, iter);
DISPATCH();
}
@@ -1128,51 +1247,46 @@
TARGET(STORE_ATTR) {
PREDICTED(STORE_ATTR);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ PyObject *owner = PEEK(1);
+ PyObject *v = PEEK(2);
+ uint16_t counter = read_u16(&next_instr[0].cache);
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyObject *name = GETITEM(names, oparg);
next_instr--;
_Py_Specialize_StoreAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_ATTR, deferred);
+ _PyAttrCache *cache = (_PyAttrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
PyObject *name = GETITEM(names, oparg);
- PyObject *owner = TOP();
- PyObject *v = SECOND();
- int err;
- STACK_SHRINK(2);
- err = PyObject_SetAttr(owner, name, v);
+ int err = PyObject_SetAttr(owner, name, v);
Py_DECREF(v);
Py_DECREF(owner);
- if (err != 0) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ if (err) goto pop_2_error;
+ STACK_SHRINK(2);
+ JUMPBY(4);
DISPATCH();
}
TARGET(DELETE_ATTR) {
+ PyObject *owner = PEEK(1);
PyObject *name = GETITEM(names, oparg);
- PyObject *owner = POP();
- int err;
- err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
+ int err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
Py_DECREF(owner);
- if (err != 0)
- goto error;
+ if (err) goto pop_1_error;
+ STACK_SHRINK(1);
DISPATCH();
}
TARGET(STORE_GLOBAL) {
+ PyObject *v = PEEK(1);
PyObject *name = GETITEM(names, oparg);
- PyObject *v = POP();
- int err;
- err = PyDict_SetItem(GLOBALS(), name, v);
+ int err = PyDict_SetItem(GLOBALS(), name, v);
Py_DECREF(v);
- if (err != 0)
- goto error;
+ if (err) goto pop_1_error;
+ STACK_SHRINK(1);
DISPATCH();
}
@@ -1967,20 +2081,18 @@
}
TARGET(STORE_ATTR_INSTANCE_VALUE) {
+ PyObject *owner = PEEK(1);
+ PyObject *value = PEEK(2);
+ uint32_t type_version = read_u32(&next_instr[1].cache);
+ uint16_t index = read_u16(&next_instr[3].cache);
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner);
DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR);
STAT_INC(STORE_ATTR, hit);
- Py_ssize_t index = cache->index;
- STACK_SHRINK(1);
- PyObject *value = POP();
PyDictValues *values = _PyDictOrValues_GetValues(dorv);
PyObject *old_value = values->values[index];
values->values[index] = value;
@@ -1991,16 +2103,18 @@
Py_DECREF(old_value);
}
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ STACK_SHRINK(2);
+ JUMPBY(4);
DISPATCH();
}
TARGET(STORE_ATTR_WITH_HINT) {
+ PyObject *owner = PEEK(1);
+ PyObject *value = PEEK(2);
+ uint32_t type_version = read_u32(&next_instr[1].cache);
+ uint16_t hint = read_u16(&next_instr[3].cache);
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
@@ -2010,17 +2124,14 @@
DEOPT_IF(dict == NULL, STORE_ATTR);
assert(PyDict_CheckExact((PyObject *)dict));
PyObject *name = GETITEM(names, oparg);
- uint16_t hint = cache->index;
DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR);
- PyObject *value, *old_value;
+ PyObject *old_value;
uint64_t new_version;
if (DK_IS_UNICODE(dict->ma_keys)) {
PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
DEOPT_IF(ep->me_key != name, STORE_ATTR);
old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
- STACK_SHRINK(1);
- value = POP();
new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value);
ep->me_value = value;
}
@@ -2029,8 +2140,6 @@
DEOPT_IF(ep->me_key != name, STORE_ATTR);
old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
- STACK_SHRINK(1);
- value = POP();
new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value);
ep->me_value = value;
}
@@ -2043,37 +2152,39 @@
/* PEP 509 */
dict->ma_version_tag = new_version;
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ STACK_SHRINK(2);
+ JUMPBY(4);
DISPATCH();
}
TARGET(STORE_ATTR_SLOT) {
+ PyObject *owner = PEEK(1);
+ PyObject *value = PEEK(2);
+ uint32_t type_version = read_u32(&next_instr[1].cache);
+ uint16_t index = read_u16(&next_instr[3].cache);
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
- char *addr = (char *)owner + cache->index;
+ char *addr = (char *)owner + index;
STAT_INC(STORE_ATTR, hit);
- STACK_SHRINK(1);
- PyObject *value = POP();
PyObject *old_value = *(PyObject **)addr;
*(PyObject **)addr = value;
Py_XDECREF(old_value);
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ STACK_SHRINK(2);
+ JUMPBY(4);
DISPATCH();
}
TARGET(COMPARE_OP) {
PREDICTED(COMPARE_OP);
+ PyObject *right = PEEK(1);
+ PyObject *left = PEEK(2);
+ PyObject *res;
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
- PyObject *right = TOP();
- PyObject *left = SECOND();
next_instr--;
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
DISPATCH_SAME_OPARG();
@@ -2081,109 +2192,126 @@
STAT_INC(COMPARE_OP, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
assert(oparg <= Py_GE);
- PyObject *right = POP();
- PyObject *left = TOP();
- PyObject *res = PyObject_RichCompare(left, right, oparg);
- SET_TOP(res);
+ res = PyObject_RichCompare(left, right, oparg);
Py_DECREF(left);
Py_DECREF(right);
- if (res == NULL) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ if (res == NULL) goto pop_2_error;
+ STACK_SHRINK(1);
+ POKE(1, res);
+ JUMPBY(2);
DISPATCH();
}
TARGET(COMPARE_OP_FLOAT_JUMP) {
- assert(cframe.use_tracing == 0);
- // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int when_to_jump_mask = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
- DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
- DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
- double dleft = PyFloat_AS_DOUBLE(left);
- double dright = PyFloat_AS_DOUBLE(right);
- int sign = (dleft > dright) - (dleft < dright);
- DEOPT_IF(isnan(dleft), COMPARE_OP);
- DEOPT_IF(isnan(dright), COMPARE_OP);
- STAT_INC(COMPARE_OP, hit);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *right = _tmp_1;
+ PyObject *left = _tmp_2;
+ size_t jump;
+ uint16_t when_to_jump_mask = read_u16(&next_instr[1].cache);
+ assert(cframe.use_tracing == 0);
+ // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false)
+ DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
+ DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
+ double dleft = PyFloat_AS_DOUBLE(left);
+ double dright = PyFloat_AS_DOUBLE(right);
+ // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+ int sign_ish = 2*(dleft > dright) + 2 - (dleft < dright);
+ DEOPT_IF(isnan(dleft), COMPARE_OP);
+ DEOPT_IF(isnan(dright), COMPARE_OP);
+ STAT_INC(COMPARE_OP, hit);
+ _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
+ _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
+ jump = sign_ish & when_to_jump_mask;
+ _tmp_2 = (PyObject *)jump;
+ }
+ JUMPBY(2);
NEXTOPARG();
- STACK_SHRINK(2);
- _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
- _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- int jump = (1 << (sign + 1)) & when_to_jump_mask;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
+ JUMPBY(1);
+ {
+ size_t jump = (size_t)_tmp_2;
+ assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
+ if (jump) {
+ JUMPBY(oparg);
+ }
}
+ STACK_SHRINK(2);
DISPATCH();
}
TARGET(COMPARE_OP_INT_JUMP) {
- assert(cframe.use_tracing == 0);
- // Combined: COMPARE_OP (int ? int) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int when_to_jump_mask = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
- DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
- DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
- DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP);
- DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP);
- STAT_INC(COMPARE_OP, hit);
- assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1);
- Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0];
- Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0];
- int sign = (ileft > iright) - (ileft < iright);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *right = _tmp_1;
+ PyObject *left = _tmp_2;
+ size_t jump;
+ uint16_t when_to_jump_mask = read_u16(&next_instr[1].cache);
+ assert(cframe.use_tracing == 0);
+ // Combined: COMPARE_OP (int ? int) + POP_JUMP_IF_(true/false)
+ DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
+ DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
+ DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP);
+ DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP);
+ STAT_INC(COMPARE_OP, hit);
+ assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1);
+ Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0];
+ Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0];
+ // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+ int sign_ish = 2*(ileft > iright) + 2 - (ileft < iright);
+ _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
+ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
+ jump = sign_ish & when_to_jump_mask;
+ _tmp_2 = (PyObject *)jump;
+ }
+ JUMPBY(2);
NEXTOPARG();
- STACK_SHRINK(2);
- _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
- _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- int jump = (1 << (sign + 1)) & when_to_jump_mask;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
+ JUMPBY(1);
+ {
+ size_t jump = (size_t)_tmp_2;
+ assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
+ if (jump) {
+ JUMPBY(oparg);
+ }
}
+ STACK_SHRINK(2);
DISPATCH();
}
TARGET(COMPARE_OP_STR_JUMP) {
- assert(cframe.use_tracing == 0);
- // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int invert = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
- DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
- DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
- STAT_INC(COMPARE_OP, hit);
- int res = _PyUnicode_Equal(left, right);
- assert(oparg == Py_EQ || oparg == Py_NE);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *right = _tmp_1;
+ PyObject *left = _tmp_2;
+ size_t jump;
+ uint16_t invert = read_u16(&next_instr[1].cache);
+ assert(cframe.use_tracing == 0);
+ // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false)
+ DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
+ DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
+ STAT_INC(COMPARE_OP, hit);
+ int res = _PyUnicode_Equal(left, right);
+ assert(oparg == Py_EQ || oparg == Py_NE);
+ _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
+ _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
+ assert(res == 0 || res == 1);
+ assert(invert == 0 || invert == 1);
+ jump = res ^ invert;
+ _tmp_2 = (PyObject *)jump;
+ }
+ JUMPBY(2);
NEXTOPARG();
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- STACK_SHRINK(2);
- _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
- _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
- assert(res == 0 || res == 1);
- assert(invert == 0 || invert == 1);
- int jump = res ^ invert;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
+ JUMPBY(1);
+ {
+ size_t jump = (size_t)_tmp_2;
+ assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
+ if (jump) {
+ JUMPBY(oparg);
+ }
}
+ STACK_SHRINK(2);
DISPATCH();
}
@@ -2631,6 +2759,29 @@
DISPATCH();
}
+ TARGET(FOR_ITER_TUPLE) {
+ assert(cframe.use_tracing == 0);
+ _PyTupleIterObject *it = (_PyTupleIterObject *)TOP();
+ DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
+ STAT_INC(FOR_ITER, hit);
+ PyTupleObject *seq = it->it_seq;
+ if (seq) {
+ if (it->it_index < PyTuple_GET_SIZE(seq)) {
+ PyObject *next = PyTuple_GET_ITEM(seq, it->it_index++);
+ PUSH(Py_NewRef(next));
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER);
+ goto end_for_iter_tuple; // End of this instruction
+ }
+ it->it_seq = NULL;
+ Py_DECREF(seq);
+ }
+ STACK_SHRINK(1);
+ Py_DECREF(it);
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1);
+ end_for_iter_tuple:
+ DISPATCH();
+ }
+
TARGET(FOR_ITER_RANGE) {
assert(cframe.use_tracing == 0);
_PyRangeIterObject *r = (_PyRangeIterObject *)TOP();
@@ -3542,6 +3693,7 @@
func->func_defaults = POP();
}
+ func->func_version = ((PyCodeObject *)codeobj)->co_version;
PUSH((PyObject *)func);
DISPATCH();
}
@@ -3678,7 +3830,7 @@
if (res == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -3702,127 +3854,3 @@
TARGET(CACHE) {
Py_UNREACHABLE();
}
-
- TARGET(LOAD_FAST__LOAD_FAST) {
- PyObject *_tmp_1;
- PyObject *_tmp_2;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_1 = value;
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_2 = value;
- }
- STACK_GROW(2);
- POKE(1, _tmp_2);
- POKE(2, _tmp_1);
- DISPATCH();
- }
-
- TARGET(LOAD_FAST__LOAD_CONST) {
- PyObject *_tmp_1;
- PyObject *_tmp_2;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_1 = value;
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value;
- value = GETITEM(consts, oparg);
- Py_INCREF(value);
- _tmp_2 = value;
- }
- STACK_GROW(2);
- POKE(1, _tmp_2);
- POKE(2, _tmp_1);
- DISPATCH();
- }
-
- TARGET(STORE_FAST__LOAD_FAST) {
- PyObject *_tmp_1 = PEEK(1);
- {
- PyObject *value = _tmp_1;
- SETLOCAL(oparg, value);
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_1 = value;
- }
- POKE(1, _tmp_1);
- DISPATCH();
- }
-
- TARGET(STORE_FAST__STORE_FAST) {
- PyObject *_tmp_1 = PEEK(2);
- PyObject *_tmp_2 = PEEK(1);
- {
- PyObject *value = _tmp_2;
- SETLOCAL(oparg, value);
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value = _tmp_1;
- SETLOCAL(oparg, value);
- }
- STACK_SHRINK(2);
- DISPATCH();
- }
-
- TARGET(LOAD_CONST__LOAD_FAST) {
- PyObject *_tmp_1;
- PyObject *_tmp_2;
- {
- PyObject *value;
- value = GETITEM(consts, oparg);
- Py_INCREF(value);
- _tmp_1 = value;
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_2 = value;
- }
- STACK_GROW(2);
- POKE(1, _tmp_2);
- POKE(2, _tmp_1);
- DISPATCH();
- }
-
- TARGET(END_FOR) {
- PyObject *_tmp_1 = PEEK(2);
- PyObject *_tmp_2 = PEEK(1);
- {
- PyObject *value = _tmp_2;
- Py_DECREF(value);
- }
- {
- PyObject *value = _tmp_1;
- Py_DECREF(value);
- }
- STACK_SHRINK(2);
- DISPATCH();
- }
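
The three COMPARE_OP_*_JUMP rewrites above all rely on the same trick: the comparison outcome is encoded as 1 (less), 2 (equal) or 4 (greater), and the inline cache stores a when_to_jump_mask of the outcomes that should take the branch, so the jump decision reduces to a single AND. A standalone sketch of that encoding, in plain C and independent of the interpreter:

/* Minimal sketch (not CPython code) of the comparison-mask encoding used by
 * the COMPARE_OP_*_JUMP specializations above. */
#include <stdbool.h>
#include <stdio.h>

static int sign_ish(double left, double right)
{
    /* 1 if <, 2 if ==, 4 if >; same formula as in the interpreter. */
    return 2 * (left > right) + 2 - (left < right);
}

static bool should_jump(double left, double right, int when_to_jump_mask)
{
    return (sign_ish(left, right) & when_to_jump_mask) != 0;
}

int main(void)
{
    /* A mask of 1|2 means "jump when less-or-equal", e.g. the branch taken
     * by POP_JUMP_IF_TRUE following a <= comparison. */
    printf("%d\n", should_jump(1.0, 2.0, 1 | 2));  /* prints 1: 1.0 <= 2.0 */
    printf("%d\n", should_jump(3.0, 2.0, 1 | 2));  /* prints 0: 3.0 >  2.0 */
    return 0;
}
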
diff --git a/Python/initconfig.c b/Python/initconfig.c
index 67f6777d3b1d9e..d05099cd997790 100644
--- a/Python/initconfig.c
+++ b/Python/initconfig.c
@@ -129,7 +129,14 @@ The following implementation-specific options are available:\n\
\n\
-X int_max_str_digits=number: limit the size of int<->str conversions.\n\
This helps avoid denial of service attacks when parsing untrusted data.\n\
- The default is sys.int_info.default_max_str_digits. 0 disables.";
+ The default is sys.int_info.default_max_str_digits. 0 disables."
+
+#ifdef Py_STATS
+"\n\
+\n\
+-X pystats: Enable pystats collection at startup."
+#endif
+;
/* Envvars that don't have equivalent command-line options are listed first */
static const char usage_envvars[] =
@@ -595,17 +602,13 @@ _Py_ClearStandardStreamEncoding(void)
/* --- Py_GetArgcArgv() ------------------------------------------- */
-/* For Py_GetArgcArgv(); set by _Py_SetArgcArgv() */
-static PyWideStringList orig_argv = {.length = 0, .items = NULL};
-
-
void
_Py_ClearArgcArgv(void)
{
PyMemAllocatorEx old_alloc;
_PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc);
- _PyWideStringList_Clear(&orig_argv);
+ _PyWideStringList_Clear(&_PyRuntime.orig_argv);
PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc);
}
@@ -620,7 +623,9 @@ _Py_SetArgcArgv(Py_ssize_t argc, wchar_t * const *argv)
PyMemAllocatorEx old_alloc;
_PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc);
- res = _PyWideStringList_Copy(&orig_argv, &argv_list);
+ // XXX _PyRuntime.orig_argv only gets cleared by Py_Main(),
+ // so it currently leaks for embedders.
+ res = _PyWideStringList_Copy(&_PyRuntime.orig_argv, &argv_list);
PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc);
return res;
@@ -631,8 +636,8 @@ _Py_SetArgcArgv(Py_ssize_t argc, wchar_t * const *argv)
void
Py_GetArgcArgv(int *argc, wchar_t ***argv)
{
- *argc = (int)orig_argv.length;
- *argv = orig_argv.items;
+ *argc = (int)_PyRuntime.orig_argv.length;
+ *argv = _PyRuntime.orig_argv.items;
}
@@ -2188,6 +2193,12 @@ config_read(PyConfig *config, int compute_path_config)
config->show_ref_count = 1;
}
+#ifdef Py_STATS
+ if (config_get_xoption(config, L"pystats")) {
+ _py_stats = &_py_stats_struct;
+ }
+#endif
+
status = config_read_complex_options(config);
if (_PyStatus_EXCEPTION(status)) {
return status;
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h
index 3aba4e7556a65f..be3ad01c151c04 100644
--- a/Python/opcode_targets.h
+++ b/Python/opcode_targets.h
@@ -61,31 +61,31 @@ static void *opcode_targets[256] = {
&&TARGET_FOR_ITER_LIST,
&&TARGET_STORE_SUBSCR,
&&TARGET_DELETE_SUBSCR,
- &&TARGET_FOR_ITER_RANGE,
+ &&TARGET_FOR_ITER_TUPLE,
&&TARGET_STOPITERATION_ERROR,
+ &&TARGET_FOR_ITER_RANGE,
&&TARGET_FOR_ITER_GEN,
&&TARGET_LOAD_ATTR_CLASS,
&&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN,
- &&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_GET_ITER,
&&TARGET_GET_YIELD_FROM_ITER,
&&TARGET_PRINT_EXPR,
&&TARGET_LOAD_BUILD_CLASS,
+ &&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_ATTR_MODULE,
- &&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ASSERTION_ERROR,
&&TARGET_RETURN_GENERATOR,
+ &&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_METHOD_LAZY_DICT,
&&TARGET_LOAD_ATTR_METHOD_NO_DICT,
&&TARGET_LOAD_ATTR_METHOD_WITH_DICT,
- &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES,
&&TARGET_LIST_TO_TUPLE,
&&TARGET_RETURN_VALUE,
&&TARGET_IMPORT_STAR,
&&TARGET_SETUP_ANNOTATIONS,
- &&TARGET_LOAD_CONST__LOAD_FAST,
+ &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES,
&&TARGET_ASYNC_GEN_WRAP,
&&TARGET_PREP_RERAISE_STAR,
&&TARGET_POP_EXCEPT,
@@ -112,7 +112,7 @@ static void *opcode_targets[256] = {
&&TARGET_JUMP_FORWARD,
&&TARGET_JUMP_IF_FALSE_OR_POP,
&&TARGET_JUMP_IF_TRUE_OR_POP,
- &&TARGET_LOAD_FAST__LOAD_CONST,
+ &&TARGET_LOAD_CONST__LOAD_FAST,
&&TARGET_POP_JUMP_IF_FALSE,
&&TARGET_POP_JUMP_IF_TRUE,
&&TARGET_LOAD_GLOBAL,
@@ -120,7 +120,7 @@ static void *opcode_targets[256] = {
&&TARGET_CONTAINS_OP,
&&TARGET_RERAISE,
&&TARGET_COPY,
- &&TARGET_LOAD_FAST__LOAD_FAST,
+ &&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_BINARY_OP,
&&TARGET_SEND,
&&TARGET_LOAD_FAST,
@@ -140,9 +140,9 @@ static void *opcode_targets[256] = {
&&TARGET_STORE_DEREF,
&&TARGET_DELETE_DEREF,
&&TARGET_JUMP_BACKWARD,
- &&TARGET_LOAD_GLOBAL_BUILTIN,
+ &&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_CALL_FUNCTION_EX,
- &&TARGET_LOAD_GLOBAL_MODULE,
+ &&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_EXTENDED_ARG,
&&TARGET_LIST_APPEND,
&&TARGET_SET_ADD,
@@ -152,24 +152,24 @@ static void *opcode_targets[256] = {
&&TARGET_YIELD_VALUE,
&&TARGET_RESUME,
&&TARGET_MATCH_CLASS,
+ &&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_STORE_ATTR_INSTANCE_VALUE,
- &&TARGET_STORE_ATTR_SLOT,
&&TARGET_FORMAT_VALUE,
&&TARGET_BUILD_CONST_KEY_MAP,
&&TARGET_BUILD_STRING,
+ &&TARGET_STORE_ATTR_SLOT,
&&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_STORE_FAST__STORE_FAST,
- &&TARGET_STORE_SUBSCR_DICT,
&&TARGET_LIST_EXTEND,
&&TARGET_SET_UPDATE,
&&TARGET_DICT_MERGE,
&&TARGET_DICT_UPDATE,
+ &&TARGET_STORE_SUBSCR_DICT,
&&TARGET_STORE_SUBSCR_LIST_INT,
&&TARGET_UNPACK_SEQUENCE_LIST,
&&TARGET_UNPACK_SEQUENCE_TUPLE,
&&TARGET_UNPACK_SEQUENCE_TWO_TUPLE,
- &&_unknown_opcode,
&&TARGET_CALL,
&&TARGET_KW_NAMES,
&&_unknown_opcode,
diff --git a/Python/perf_trampoline.c b/Python/perf_trampoline.c
index 161e0ef74cf1da..1957ab82c33951 100644
--- a/Python/perf_trampoline.c
+++ b/Python/perf_trampoline.c
@@ -134,11 +134,6 @@ any DWARF information available for them).
#include "pycore_frame.h"
#include "pycore_interp.h"
-typedef enum {
- PERF_STATUS_FAILED = -1, // Perf trampoline is in an invalid state
- PERF_STATUS_NO_INIT = 0, // Perf trampoline is not initialized
- PERF_STATUS_OK = 1, // Perf trampoline is ready to be executed
-} perf_status_t;
#ifdef PY_HAVE_PERF_TRAMPOLINE
@@ -190,24 +185,13 @@ struct code_arena_st {
};
typedef struct code_arena_st code_arena_t;
-
-struct trampoline_api_st {
- void* (*init_state)(void);
- void (*write_state)(void* state, const void *code_addr,
- unsigned int code_size, PyCodeObject* code);
- int (*free_state)(void* state);
- void *state;
-};
-
typedef struct trampoline_api_st trampoline_api_t;
-
-static perf_status_t perf_status = PERF_STATUS_NO_INIT;
-static Py_ssize_t extra_code_index = -1;
-static code_arena_t *code_arena;
-static trampoline_api_t trampoline_api;
-
-static FILE *perf_map_file;
+#define perf_status _PyRuntime.ceval.perf.status
+#define extra_code_index _PyRuntime.ceval.perf.extra_code_index
+#define perf_code_arena _PyRuntime.ceval.perf.code_arena
+#define trampoline_api _PyRuntime.ceval.perf.trampoline_api
+#define perf_map_file _PyRuntime.ceval.perf.map_file
static void *
perf_map_get_file(void)
@@ -344,17 +328,17 @@ new_code_arena(void)
new_arena->size = mem_size;
new_arena->size_left = mem_size;
new_arena->code_size = code_size;
- new_arena->prev = code_arena;
- code_arena = new_arena;
+ new_arena->prev = perf_code_arena;
+ perf_code_arena = new_arena;
return 0;
}
static void
free_code_arenas(void)
{
- code_arena_t *cur = code_arena;
+ code_arena_t *cur = perf_code_arena;
code_arena_t *prev;
- code_arena = NULL; // invalid static pointer
+ perf_code_arena = NULL; // invalid static pointer
while (cur) {
munmap(cur->start_addr, cur->size);
prev = cur->prev;
@@ -375,14 +359,14 @@ code_arena_new_code(code_arena_t *code_arena)
static inline py_trampoline
compile_trampoline(void)
{
- if ((code_arena == NULL) ||
- (code_arena->size_left <= code_arena->code_size)) {
+ if ((perf_code_arena == NULL) ||
+ (perf_code_arena->size_left <= perf_code_arena->code_size)) {
if (new_code_arena() < 0) {
return NULL;
}
}
- assert(code_arena->size_left <= code_arena->size);
- return code_arena_new_code(code_arena);
+ assert(perf_code_arena->size_left <= perf_code_arena->size);
+ return code_arena_new_code(perf_code_arena);
}
static PyObject *
@@ -405,7 +389,7 @@ py_trampoline_evaluator(PyThreadState *ts, _PyInterpreterFrame *frame,
goto default_eval;
}
trampoline_api.write_state(trampoline_api.state, new_trampoline,
- code_arena->code_size, co);
+ perf_code_arena->code_size, co);
_PyCode_SetExtra((PyObject *)co, extra_code_index,
(void *)new_trampoline);
f = new_trampoline;
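
The statics deleted above (perf_status, extra_code_index, code_arena, trampoline_api, perf_map_file) now live on _PyRuntime, with #define aliases so the rest of the file needs almost no edits; only code_arena is renamed to perf_code_arena because code_arena_new_code() already uses that name for its parameter. A rough, self-contained illustration of the aliasing pattern (every name below is invented):

/* Illustrative sketch: file-level statics become fields of a process-wide
 * runtime struct, and #define aliases keep existing uses compiling. */
#include <stddef.h>
#include <stdio.h>

struct demo_perf_state {
    int status;
    void *code_arena;
};

struct demo_runtime_state {
    struct demo_perf_state perf;
};

static struct demo_runtime_state demo_runtime;

#define perf_status     demo_runtime.perf.status
#define perf_code_arena demo_runtime.perf.code_arena

int main(void)
{
    perf_status = 1;         /* expands to demo_runtime.perf.status = 1 */
    perf_code_arena = NULL;  /* expands to demo_runtime.perf.code_arena = NULL */
    printf("%d %p\n", perf_status, perf_code_arena);
    return 0;
}
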
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index 8209132ebc6c27..1cb0e4d747e10a 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -54,7 +54,6 @@ extern void _PyIO_Fini(void);
#ifdef MS_WINDOWS
# undef BYTE
-# include "windows.h"
extern PyTypeObject PyWindowsConsoleIO_Type;
# define PyWindowsConsoleIO_Check(op) \
@@ -606,6 +605,11 @@ pycore_init_runtime(_PyRuntimeState *runtime,
return status;
}
+ status = _PyTime_Init();
+ if (_PyStatus_EXCEPTION(status)) {
+ return status;
+ }
+
status = _PyImport_Init();
if (_PyStatus_EXCEPTION(status)) {
return status;
diff --git a/Python/pystate.c b/Python/pystate.c
index 19fd9a6ae4497b..ea3c22c5d71ad6 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -466,6 +466,11 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate)
}
interp->active_func_watchers = 0;
+ for (int i=0; i < CODE_MAX_WATCHERS; i++) {
+ interp->code_watchers[i] = NULL;
+ }
+ interp->active_code_watchers = 0;
+
// XXX Once we have one allocator per interpreter (i.e.
// per-interpreter GC) we must ensure that all of the interpreter's
// objects have been cleaned up at that point.
@@ -1784,30 +1789,78 @@ PyGILState_Release(PyGILState_STATE oldstate)
/* cross-interpreter data */
-crossinterpdatafunc _PyCrossInterpreterData_Lookup(PyObject *);
+static inline void
+_xidata_init(_PyCrossInterpreterData *data)
+{
+ // If the value is being reused
+ // then _xidata_clear() should have been called already.
+ assert(data->data == NULL);
+ assert(data->obj == NULL);
+ *data = (_PyCrossInterpreterData){0};
+ data->interp = -1;
+}
-/* This is a separate func from _PyCrossInterpreterData_Lookup in order
- to keep the registry code separate. */
-static crossinterpdatafunc
-_lookup_getdata(PyObject *obj)
+static inline void
+_xidata_clear(_PyCrossInterpreterData *data)
{
- crossinterpdatafunc getdata = _PyCrossInterpreterData_Lookup(obj);
- if (getdata == NULL && PyErr_Occurred() == 0)
- PyErr_Format(PyExc_ValueError,
- "%S does not support cross-interpreter data", obj);
- return getdata;
+ if (data->free != NULL) {
+ data->free(data->data);
+ }
+ data->data = NULL;
+ Py_CLEAR(data->obj);
+}
+
+void
+_PyCrossInterpreterData_Init(_PyCrossInterpreterData *data,
+ PyInterpreterState *interp,
+ void *shared, PyObject *obj,
+ xid_newobjectfunc new_object)
+{
+ assert(data != NULL);
+ assert(new_object != NULL);
+ _xidata_init(data);
+ data->data = shared;
+ if (obj != NULL) {
+ assert(interp != NULL);
+ // released in _PyCrossInterpreterData_Clear()
+ data->obj = Py_NewRef(obj);
+ }
+ // Ideally every object would know its owning interpreter.
+ // Until then, we have to rely on the caller to identify it
+ // (but we don't need it in all cases).
+ data->interp = (interp != NULL) ? interp->id : -1;
+ data->new_object = new_object;
}
int
-_PyObject_CheckCrossInterpreterData(PyObject *obj)
-{
- crossinterpdatafunc getdata = _lookup_getdata(obj);
- if (getdata == NULL) {
+_PyCrossInterpreterData_InitWithSize(_PyCrossInterpreterData *data,
+ PyInterpreterState *interp,
+ const size_t size, PyObject *obj,
+ xid_newobjectfunc new_object)
+{
+ assert(size > 0);
+ // For now we always free the shared data in the same interpreter
+ // where it was allocated, so the interpreter is required.
+ assert(interp != NULL);
+ _PyCrossInterpreterData_Init(data, interp, NULL, obj, new_object);
+ data->data = PyMem_Malloc(size);
+ if (data->data == NULL) {
return -1;
}
+ data->free = PyMem_Free;
return 0;
}
+void
+_PyCrossInterpreterData_Clear(PyInterpreterState *interp,
+ _PyCrossInterpreterData *data)
+{
+ assert(data != NULL);
+ // This must be called in the owning interpreter.
+ assert(interp == NULL || data->interp == interp->id);
+ _xidata_clear(data);
+}
+
static int
_check_xidata(PyThreadState *tstate, _PyCrossInterpreterData *data)
{
@@ -1830,6 +1883,30 @@ _check_xidata(PyThreadState *tstate, _PyCrossInterpreterData *data)
return 0;
}
+crossinterpdatafunc _PyCrossInterpreterData_Lookup(PyObject *);
+
+/* This is a separate func from _PyCrossInterpreterData_Lookup in order
+ to keep the registry code separate. */
+static crossinterpdatafunc
+_lookup_getdata(PyObject *obj)
+{
+ crossinterpdatafunc getdata = _PyCrossInterpreterData_Lookup(obj);
+ if (getdata == NULL && PyErr_Occurred() == 0)
+ PyErr_Format(PyExc_ValueError,
+ "%S does not support cross-interpreter data", obj);
+ return getdata;
+}
+
+int
+_PyObject_CheckCrossInterpreterData(PyObject *obj)
+{
+ crossinterpdatafunc getdata = _lookup_getdata(obj);
+ if (getdata == NULL) {
+ return -1;
+ }
+ return 0;
+}
+
int
_PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data)
{
@@ -1842,7 +1919,7 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data)
// Reset data before re-populating.
*data = (_PyCrossInterpreterData){0};
- data->free = PyMem_RawFree; // Set a default that may be overridden.
+ data->interp = -1;
// Call the "getdata" func for the object.
Py_INCREF(obj);
@@ -1851,7 +1928,7 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data)
Py_DECREF(obj);
return -1;
}
- int res = getdata(obj, data);
+ int res = getdata(tstate, obj, data);
Py_DECREF(obj);
if (res != 0) {
return -1;
@@ -1860,27 +1937,24 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data)
// Fill in the blanks and validate the result.
data->interp = interp->id;
if (_check_xidata(tstate, data) != 0) {
- _PyCrossInterpreterData_Release(data);
+ (void)_PyCrossInterpreterData_Release(data);
return -1;
}
return 0;
}
-static void
-_release_xidata(void *arg)
+PyObject *
+_PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *data)
{
- _PyCrossInterpreterData *data = (_PyCrossInterpreterData *)arg;
- if (data->free != NULL) {
- data->free(data->data);
- }
- Py_XDECREF(data->obj);
+ return data->new_object(data);
}
+typedef void (*releasefunc)(PyInterpreterState *, void *);
+
static void
_call_in_interpreter(struct _gilstate_runtime_state *gilstate,
- PyInterpreterState *interp,
- void (*func)(void *), void *arg)
+ PyInterpreterState *interp, releasefunc func, void *arg)
{
/* We would use Py_AddPendingCall() if it weren't specific to the
* main interpreter (see bpo-33608). In the meantime we take a
@@ -1894,7 +1968,9 @@ _call_in_interpreter(struct _gilstate_runtime_state *gilstate,
save_tstate = _PyThreadState_Swap(gilstate, tstate);
}
- func(arg);
+ // XXX Once the GIL is per-interpreter, this should be called with the
+ // calling interpreter's GIL released and the target interpreter's held.
+ func(interp, arg);
// Switch back.
if (save_tstate != NULL) {
@@ -1902,33 +1978,30 @@ _call_in_interpreter(struct _gilstate_runtime_state *gilstate,
}
}
-void
+int
_PyCrossInterpreterData_Release(_PyCrossInterpreterData *data)
{
- if (data->data == NULL && data->obj == NULL) {
+ if (data->free == NULL && data->obj == NULL) {
// Nothing to release!
- return;
+ data->data = NULL;
+ return 0;
}
// Switch to the original interpreter.
PyInterpreterState *interp = _PyInterpreterState_LookUpID(data->interp);
if (interp == NULL) {
// The interpreter was already destroyed.
- if (data->free != NULL) {
- // XXX Someone leaked some memory...
- }
- return;
+ // This function shouldn't have been called.
+ // XXX Someone leaked some memory...
+ assert(PyErr_Occurred());
+ return -1;
}
// "Release" the data and/or the object.
struct _gilstate_runtime_state *gilstate = &_PyRuntime.gilstate;
- _call_in_interpreter(gilstate, interp, _release_xidata, data);
-}
-
-PyObject *
-_PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *data)
-{
- return data->new_object(data);
+ _call_in_interpreter(gilstate, interp,
+ (releasefunc)_PyCrossInterpreterData_Clear, data);
+ return 0;
}
/* registry of {type -> crossinterpdatafunc} */
@@ -1938,21 +2011,73 @@ _PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *data)
crossinterpdatafunc. It would be simpler and more efficient. */
static int
-_register_xidata(struct _xidregistry *xidregistry, PyTypeObject *cls,
+_xidregistry_add_type(struct _xidregistry *xidregistry, PyTypeObject *cls,
crossinterpdatafunc getdata)
{
// Note that we effectively replace already registered classes
// rather than failing.
struct _xidregitem *newhead = PyMem_RawMalloc(sizeof(struct _xidregitem));
- if (newhead == NULL)
+ if (newhead == NULL) {
return -1;
- newhead->cls = cls;
+ }
+ // XXX Assign a callback to clear the entry from the registry?
+ newhead->cls = PyWeakref_NewRef((PyObject *)cls, NULL);
+ if (newhead->cls == NULL) {
+ PyMem_RawFree(newhead);
+ return -1;
+ }
newhead->getdata = getdata;
+ newhead->prev = NULL;
newhead->next = xidregistry->head;
+ if (newhead->next != NULL) {
+ newhead->next->prev = newhead;
+ }
xidregistry->head = newhead;
return 0;
}
+static struct _xidregitem *
+_xidregistry_remove_entry(struct _xidregistry *xidregistry,
+ struct _xidregitem *entry)
+{
+ struct _xidregitem *next = entry->next;
+ if (entry->prev != NULL) {
+ assert(entry->prev->next == entry);
+ entry->prev->next = next;
+ }
+ else {
+ assert(xidregistry->head == entry);
+ xidregistry->head = next;
+ }
+ if (next != NULL) {
+ next->prev = entry->prev;
+ }
+ Py_DECREF(entry->cls);
+ PyMem_RawFree(entry);
+ return next;
+}
+
+static struct _xidregitem *
+_xidregistry_find_type(struct _xidregistry *xidregistry, PyTypeObject *cls)
+{
+ struct _xidregitem *cur = xidregistry->head;
+ while (cur != NULL) {
+ PyObject *registered = PyWeakref_GetObject(cur->cls);
+ if (registered == Py_None) {
+ // The weakly ref'ed object was freed.
+ cur = _xidregistry_remove_entry(xidregistry, cur);
+ }
+ else {
+ assert(PyType_Check(registered));
+ if (registered == (PyObject *)cls) {
+ return cur;
+ }
+ cur = cur->next;
+ }
+ }
+ return NULL;
+}
+
static void _register_builtins_for_crossinterpreter_data(struct _xidregistry *xidregistry);
int
@@ -1968,19 +2093,32 @@ _PyCrossInterpreterData_RegisterClass(PyTypeObject *cls,
return -1;
}
- // Make sure the class isn't ever deallocated.
- Py_INCREF((PyObject *)cls);
-
struct _xidregistry *xidregistry = &_PyRuntime.xidregistry;
PyThread_acquire_lock(xidregistry->mutex, WAIT_LOCK);
if (xidregistry->head == NULL) {
_register_builtins_for_crossinterpreter_data(xidregistry);
}
- int res = _register_xidata(xidregistry, cls, getdata);
+ int res = _xidregistry_add_type(xidregistry, cls, getdata);
+ PyThread_release_lock(xidregistry->mutex);
+ return res;
+}
+
+int
+_PyCrossInterpreterData_UnregisterClass(PyTypeObject *cls)
+{
+ int res = 0;
+ struct _xidregistry *xidregistry = &_PyRuntime.xidregistry;
+ PyThread_acquire_lock(xidregistry->mutex, WAIT_LOCK);
+ struct _xidregitem *matched = _xidregistry_find_type(xidregistry, cls);
+ if (matched != NULL) {
+ (void)_xidregistry_remove_entry(xidregistry, matched);
+ res = 1;
+ }
PyThread_release_lock(xidregistry->mutex);
return res;
}
+
/* Cross-interpreter objects are looked up by exact match on the class.
We can reassess this policy when we move from a global registry to a
tp_* slot. */
@@ -1990,22 +2128,15 @@ _PyCrossInterpreterData_Lookup(PyObject *obj)
{
struct _xidregistry *xidregistry = &_PyRuntime.xidregistry;
PyObject *cls = PyObject_Type(obj);
- crossinterpdatafunc getdata = NULL;
PyThread_acquire_lock(xidregistry->mutex, WAIT_LOCK);
- struct _xidregitem *cur = xidregistry->head;
- if (cur == NULL) {
+ if (xidregistry->head == NULL) {
_register_builtins_for_crossinterpreter_data(xidregistry);
- cur = xidregistry->head;
- }
- for(; cur != NULL; cur = cur->next) {
- if (cur->cls == (PyTypeObject *)cls) {
- getdata = cur->getdata;
- break;
- }
}
+ struct _xidregitem *matched = _xidregistry_find_type(xidregistry,
+ (PyTypeObject *)cls);
Py_DECREF(cls);
PyThread_release_lock(xidregistry->mutex);
- return getdata;
+ return matched != NULL ? matched->getdata : NULL;
}
/* cross-interpreter data for builtin types */
@@ -2023,16 +2154,21 @@ _new_bytes_object(_PyCrossInterpreterData *data)
}
static int
-_bytes_shared(PyObject *obj, _PyCrossInterpreterData *data)
+_bytes_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
{
- struct _shared_bytes_data *shared = PyMem_NEW(struct _shared_bytes_data, 1);
+ if (_PyCrossInterpreterData_InitWithSize(
+ data, tstate->interp, sizeof(struct _shared_bytes_data), obj,
+ _new_bytes_object
+ ) < 0)
+ {
+ return -1;
+ }
+ struct _shared_bytes_data *shared = (struct _shared_bytes_data *)data->data;
if (PyBytes_AsStringAndSize(obj, &shared->bytes, &shared->len) < 0) {
+ _PyCrossInterpreterData_Clear(tstate->interp, data);
return -1;
}
- data->data = (void *)shared;
- data->obj = Py_NewRef(obj); // Will be "released" (decref'ed) when data released.
- data->new_object = _new_bytes_object;
- data->free = PyMem_Free;
return 0;
}
@@ -2050,16 +2186,20 @@ _new_str_object(_PyCrossInterpreterData *data)
}
static int
-_str_shared(PyObject *obj, _PyCrossInterpreterData *data)
+_str_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
{
- struct _shared_str_data *shared = PyMem_NEW(struct _shared_str_data, 1);
+ if (_PyCrossInterpreterData_InitWithSize(
+ data, tstate->interp, sizeof(struct _shared_str_data), obj,
+ _new_str_object
+ ) < 0)
+ {
+ return -1;
+ }
+ struct _shared_str_data *shared = (struct _shared_str_data *)data->data;
shared->kind = PyUnicode_KIND(obj);
shared->buffer = PyUnicode_DATA(obj);
shared->len = PyUnicode_GET_LENGTH(obj);
- data->data = (void *)shared;
- data->obj = Py_NewRef(obj); // Will be "released" (decref'ed) when data released.
- data->new_object = _new_str_object;
- data->free = PyMem_Free;
return 0;
}
@@ -2070,7 +2210,8 @@ _new_long_object(_PyCrossInterpreterData *data)
}
static int
-_long_shared(PyObject *obj, _PyCrossInterpreterData *data)
+_long_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
{
/* Note that this means the size of shareable ints is bounded by
* sys.maxsize. Hence on 32-bit architectures that is half the
@@ -2083,10 +2224,9 @@ _long_shared(PyObject *obj, _PyCrossInterpreterData *data)
}
return -1;
}
- data->data = (void *)value;
- data->obj = NULL;
- data->new_object = _new_long_object;
- data->free = NULL;
+ _PyCrossInterpreterData_Init(data, tstate->interp, (void *)value, NULL,
+ _new_long_object);
+ // data->obj and data->free remain NULL
return 0;
}
@@ -2098,12 +2238,12 @@ _new_none_object(_PyCrossInterpreterData *data)
}
static int
-_none_shared(PyObject *obj, _PyCrossInterpreterData *data)
+_none_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
{
- data->data = NULL;
- // data->obj remains NULL
- data->new_object = _new_none_object;
- data->free = NULL; // There is nothing to free.
+ _PyCrossInterpreterData_Init(data, tstate->interp, NULL, NULL,
+ _new_none_object);
+ // data->data, data->obj and data->free remain NULL
return 0;
}
@@ -2111,22 +2251,22 @@ static void
_register_builtins_for_crossinterpreter_data(struct _xidregistry *xidregistry)
{
// None
- if (_register_xidata(xidregistry, (PyTypeObject *)PyObject_Type(Py_None), _none_shared) != 0) {
+ if (_xidregistry_add_type(xidregistry, (PyTypeObject *)PyObject_Type(Py_None), _none_shared) != 0) {
Py_FatalError("could not register None for cross-interpreter sharing");
}
// int
- if (_register_xidata(xidregistry, &PyLong_Type, _long_shared) != 0) {
+ if (_xidregistry_add_type(xidregistry, &PyLong_Type, _long_shared) != 0) {
Py_FatalError("could not register int for cross-interpreter sharing");
}
// bytes
- if (_register_xidata(xidregistry, &PyBytes_Type, _bytes_shared) != 0) {
+ if (_xidregistry_add_type(xidregistry, &PyBytes_Type, _bytes_shared) != 0) {
Py_FatalError("could not register bytes for cross-interpreter sharing");
}
// str
- if (_register_xidata(xidregistry, &PyUnicode_Type, _str_shared) != 0) {
+ if (_xidregistry_add_type(xidregistry, &PyUnicode_Type, _str_shared) != 0) {
Py_FatalError("could not register str for cross-interpreter sharing");
}
}
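
With the registry rework above, a "shared" callback now receives the calling PyThreadState and fills in its _PyCrossInterpreterData through _PyCrossInterpreterData_Init() (or _InitWithSize() when it needs an allocation), and classes are held by weakref and can be unregistered. A hedged sketch of wiring up a hypothetical shareable type (MyHandle_Type and MyHandle_AsSsize_t are invented stand-ins, not CPython API):

static PyObject *
_new_myhandle_object(_PyCrossInterpreterData *data)
{
    /* Rebuild a value in the importing interpreter from the raw payload. */
    return PyLong_FromSsize_t((Py_ssize_t)data->data);
}

static int
_myhandle_shared(PyThreadState *tstate, PyObject *obj,
                 _PyCrossInterpreterData *data)
{
    Py_ssize_t handle = MyHandle_AsSsize_t(obj);  /* hypothetical accessor */
    if (handle < 0 && PyErr_Occurred()) {
        return -1;
    }
    /* No separate allocation, so data->obj and data->free stay NULL,
     * exactly like _long_shared above. */
    _PyCrossInterpreterData_Init(data, tstate->interp, (void *)handle, NULL,
                                 _new_myhandle_object);
    return 0;
}

/* Registered (and now symmetrically unregistered) with:
 *   _PyCrossInterpreterData_RegisterClass(&MyHandle_Type, _myhandle_shared);
 *   _PyCrossInterpreterData_UnregisterClass(&MyHandle_Type);
 */
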
diff --git a/Python/specialize.c b/Python/specialize.c
index cd09b188b7fa97..4fa2c2ffc04bc5 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -18,7 +18,7 @@
#ifdef Py_STATS
PyStats _py_stats_struct = { 0 };
-PyStats *_py_stats = &_py_stats_struct;
+PyStats *_py_stats = NULL;
#define ADD_STAT_TO_DICT(res, field) \
do { \
@@ -205,9 +205,6 @@ _Py_StatsClear(void)
void
_Py_PrintSpecializationStats(int to_file)
{
- if (_py_stats == NULL) {
- return;
- }
FILE *out = stderr;
if (to_file) {
/* Write to a file instead of stderr. */
@@ -238,7 +235,7 @@ _Py_PrintSpecializationStats(int to_file)
else {
fprintf(out, "Specialization stats:\n");
}
- print_stats(out, _py_stats);
+ print_stats(out, &_py_stats_struct);
if (out != stderr) {
fclose(out);
}
@@ -270,26 +267,26 @@ _PyCode_Quicken(PyCodeObject *code)
int opcode = _PyOpcode_Deopt[_Py_OPCODE(instructions[i])];
int caches = _PyOpcode_Caches[opcode];
if (caches) {
- instructions[i + 1] = adaptive_counter_warmup();
+ instructions[i + 1].cache = adaptive_counter_warmup();
previous_opcode = 0;
i += caches;
continue;
}
switch (previous_opcode << 8 | opcode) {
case LOAD_CONST << 8 | LOAD_FAST:
- _Py_SET_OPCODE(instructions[i - 1], LOAD_CONST__LOAD_FAST);
+ instructions[i - 1].opcode = LOAD_CONST__LOAD_FAST;
break;
case LOAD_FAST << 8 | LOAD_CONST:
- _Py_SET_OPCODE(instructions[i - 1], LOAD_FAST__LOAD_CONST);
+ instructions[i - 1].opcode = LOAD_FAST__LOAD_CONST;
break;
case LOAD_FAST << 8 | LOAD_FAST:
- _Py_SET_OPCODE(instructions[i - 1], LOAD_FAST__LOAD_FAST);
+ instructions[i - 1].opcode = LOAD_FAST__LOAD_FAST;
break;
case STORE_FAST << 8 | LOAD_FAST:
- _Py_SET_OPCODE(instructions[i - 1], STORE_FAST__LOAD_FAST);
+ instructions[i - 1].opcode = STORE_FAST__LOAD_FAST;
break;
case STORE_FAST << 8 | STORE_FAST:
- _Py_SET_OPCODE(instructions[i - 1], STORE_FAST__STORE_FAST);
+ instructions[i - 1].opcode = STORE_FAST__STORE_FAST;
break;
}
previous_opcode = opcode;
@@ -310,6 +307,7 @@ _PyCode_Quicken(PyCodeObject *code)
#define SPEC_FAIL_NOT_PY_FUNCTION 7
+#define SPEC_FAIL_LOAD_GLOBAL_NON_DICT 17
#define SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT 18
/* Attributes */
@@ -335,6 +333,8 @@ _PyCode_Quicken(PyCodeObject *code)
#define SPEC_FAIL_ATTR_INSTANCE_ATTRIBUTE 26
#define SPEC_FAIL_ATTR_METACLASS_ATTRIBUTE 27
#define SPEC_FAIL_ATTR_PROPERTY_NOT_PY_FUNCTION 28
+#define SPEC_FAIL_ATTR_NOT_IN_KEYS 29
+#define SPEC_FAIL_ATTR_NOT_IN_DICT 30
/* Binary subscr and store subscr */
@@ -451,41 +451,44 @@ static bool function_check_args(PyObject *o, int expected_argcount, int opcode);
static uint32_t function_get_version(PyObject *o, int opcode);
static int
-specialize_module_load_attr(PyObject *owner, _Py_CODEUNIT *instr,
- PyObject *name, int opcode, int opcode_module)
-{
+specialize_module_load_attr(
+ PyObject *owner, _Py_CODEUNIT *instr, PyObject *name
+) {
_PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
PyModuleObject *m = (PyModuleObject *)owner;
assert((owner->ob_type->tp_flags & Py_TPFLAGS_MANAGED_DICT) == 0);
PyDictObject *dict = (PyDictObject *)m->md_dict;
if (dict == NULL) {
- SPECIALIZATION_FAIL(opcode, SPEC_FAIL_NO_DICT);
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_NO_DICT);
return -1;
}
if (dict->ma_keys->dk_kind != DICT_KEYS_UNICODE) {
- SPECIALIZATION_FAIL(opcode, SPEC_FAIL_ATTR_NON_STRING_OR_SPLIT);
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_NON_STRING_OR_SPLIT);
return -1;
}
Py_ssize_t index = _PyDict_LookupIndex(dict, &_Py_ID(__getattr__));
assert(index != DKIX_ERROR);
if (index != DKIX_EMPTY) {
- SPECIALIZATION_FAIL(opcode, SPEC_FAIL_ATTR_MODULE_ATTR_NOT_FOUND);
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_MODULE_ATTR_NOT_FOUND);
return -1;
}
index = _PyDict_LookupIndex(dict, name);
assert (index != DKIX_ERROR);
if (index != (uint16_t)index) {
- SPECIALIZATION_FAIL(opcode, SPEC_FAIL_OUT_OF_RANGE);
+ SPECIALIZATION_FAIL(LOAD_ATTR,
+ index == DKIX_EMPTY ?
+ SPEC_FAIL_ATTR_MODULE_ATTR_NOT_FOUND :
+ SPEC_FAIL_OUT_OF_RANGE);
return -1;
}
uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(dict->ma_keys);
if (keys_version == 0) {
- SPECIALIZATION_FAIL(opcode, SPEC_FAIL_OUT_OF_VERSIONS);
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS);
return -1;
}
write_u32(cache->version, keys_version);
cache->index = (uint16_t)index;
- _Py_SET_OPCODE(*instr, opcode_module);
+ _py_set_opcode(instr, LOAD_ATTR_MODULE);
return 0;
}
@@ -632,12 +635,15 @@ specialize_dict_access(
Py_ssize_t index = _PyDictKeys_StringLookup(keys, name);
assert (index != DKIX_ERROR);
if (index != (uint16_t)index) {
- SPECIALIZATION_FAIL(base_op, SPEC_FAIL_OUT_OF_RANGE);
+ SPECIALIZATION_FAIL(base_op,
+ index == DKIX_EMPTY ?
+ SPEC_FAIL_ATTR_NOT_IN_KEYS :
+ SPEC_FAIL_OUT_OF_RANGE);
return 0;
}
write_u32(cache->version, type->tp_version_tag);
cache->index = (uint16_t)index;
- _Py_SET_OPCODE(*instr, values_op);
+ _py_set_opcode(instr, values_op);
}
else {
PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv);
@@ -649,12 +655,15 @@ specialize_dict_access(
Py_ssize_t index =
_PyDict_LookupIndex(dict, name);
if (index != (uint16_t)index) {
- SPECIALIZATION_FAIL(base_op, SPEC_FAIL_OUT_OF_RANGE);
+ SPECIALIZATION_FAIL(base_op,
+ index == DKIX_EMPTY ?
+ SPEC_FAIL_ATTR_NOT_IN_DICT :
+ SPEC_FAIL_OUT_OF_RANGE);
return 0;
}
cache->index = (uint16_t)index;
write_u32(cache->version, type->tp_version_tag);
- _Py_SET_OPCODE(*instr, hint_op);
+ _py_set_opcode(instr, hint_op);
}
return 1;
}
@@ -677,8 +686,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
goto fail;
}
if (PyModule_CheckExact(owner)) {
- if (specialize_module_load_attr(owner, instr, name, LOAD_ATTR,
- LOAD_ATTR_MODULE))
+ if (specialize_module_load_attr(owner, instr, name))
{
goto fail;
}
@@ -705,7 +713,9 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
goto success;
}
}
- SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD);
+ else {
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD);
+ }
goto fail;
}
case PROPERTY:
@@ -733,7 +743,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
write_u32(lm_cache->type_version, type->tp_version_tag);
/* borrowed */
write_obj(lm_cache->descr, fget);
- _Py_SET_OPCODE(*instr, LOAD_ATTR_PROPERTY);
+ _py_set_opcode(instr, LOAD_ATTR_PROPERTY);
goto success;
}
case OBJECT_SLOT:
@@ -757,7 +767,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
assert(offset > 0);
cache->index = (uint16_t)offset;
write_u32(cache->version, type->tp_version_tag);
- _Py_SET_OPCODE(*instr, LOAD_ATTR_SLOT);
+ _py_set_opcode(instr, LOAD_ATTR_SLOT);
goto success;
}
case DUNDER_CLASS:
@@ -766,7 +776,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
assert(offset == (uint16_t)offset);
cache->index = (uint16_t)offset;
write_u32(cache->version, type->tp_version_tag);
- _Py_SET_OPCODE(*instr, LOAD_ATTR_SLOT);
+ _py_set_opcode(instr, LOAD_ATTR_SLOT);
goto success;
}
case OTHER_SLOT:
@@ -794,7 +804,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
/* borrowed */
write_obj(lm_cache->descr, descr);
write_u32(lm_cache->type_version, type->tp_version_tag);
- _Py_SET_OPCODE(*instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN);
+ _py_set_opcode(instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN);
goto success;
}
case BUILTIN_CLASSMETHOD:
@@ -812,7 +822,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
fail:
STAT_INC(LOAD_ATTR, failure);
assert(!PyErr_Occurred());
- _Py_SET_OPCODE(*instr, LOAD_ATTR);
+ _py_set_opcode(instr, LOAD_ATTR);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -871,7 +881,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
assert(offset > 0);
cache->index = (uint16_t)offset;
write_u32(cache->version, type->tp_version_tag);
- _Py_SET_OPCODE(*instr, STORE_ATTR_SLOT);
+ _py_set_opcode(instr, STORE_ATTR_SLOT);
goto success;
}
case DUNDER_CLASS:
@@ -900,7 +910,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
fail:
STAT_INC(STORE_ATTR, failure);
assert(!PyErr_Occurred());
- _Py_SET_OPCODE(*instr, STORE_ATTR);
+ _py_set_opcode(instr, STORE_ATTR);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -964,7 +974,7 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr,
case NON_DESCRIPTOR:
write_u32(cache->type_version, ((PyTypeObject *)owner)->tp_version_tag);
write_obj(cache->descr, descr);
- _Py_SET_OPCODE(*instr, LOAD_ATTR_CLASS);
+ _py_set_opcode(instr, LOAD_ATTR_CLASS);
return 0;
#ifdef Py_STATS
case ABSENT:
@@ -1046,21 +1056,21 @@ PyObject *descr, DescriptorClassification kind)
}
switch(dictkind) {
case NO_DICT:
- _Py_SET_OPCODE(*instr, LOAD_ATTR_METHOD_NO_DICT);
+ _py_set_opcode(instr, LOAD_ATTR_METHOD_NO_DICT);
break;
case MANAGED_VALUES:
- _Py_SET_OPCODE(*instr, LOAD_ATTR_METHOD_WITH_VALUES);
+ _py_set_opcode(instr, LOAD_ATTR_METHOD_WITH_VALUES);
break;
case MANAGED_DICT:
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_HAS_MANAGED_DICT);
goto fail;
case OFFSET_DICT:
assert(owner_cls->tp_dictoffset > 0 && owner_cls->tp_dictoffset <= INT16_MAX);
- _Py_SET_OPCODE(*instr, LOAD_ATTR_METHOD_WITH_DICT);
+ _py_set_opcode(instr, LOAD_ATTR_METHOD_WITH_DICT);
break;
case LAZY_DICT:
assert(owner_cls->tp_dictoffset > 0 && owner_cls->tp_dictoffset <= INT16_MAX);
- _Py_SET_OPCODE(*instr, LOAD_ATTR_METHOD_LAZY_DICT);
+ _py_set_opocde(instr, LOAD_ATTR_METHOD_LAZY_DICT);
break;
}
/* `descr` is borrowed. This is safe for methods (even inherited ones from
@@ -1079,7 +1089,6 @@ PyObject *descr, DescriptorClassification kind)
*/
write_u32(cache->type_version, owner_cls->tp_version_tag);
write_obj(cache->descr, descr);
- // Fall through.
return 1;
fail:
return 0;
@@ -1095,6 +1104,7 @@ _Py_Specialize_LoadGlobal(
_PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)(instr + 1);
assert(PyUnicode_CheckExact(name));
if (!PyDict_CheckExact(globals)) {
+ SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_DICT);
goto fail;
}
PyDictKeysObject * globals_keys = ((PyDictObject *)globals)->ma_keys;
@@ -1104,23 +1114,26 @@ _Py_Specialize_LoadGlobal(
}
Py_ssize_t index = _PyDictKeys_StringLookup(globals_keys, name);
if (index == DKIX_ERROR) {
- SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT);
+ SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_EXPECTED_ERROR);
goto fail;
}
if (index != DKIX_EMPTY) {
if (index != (uint16_t)index) {
+ SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE);
goto fail;
}
uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(globals_keys);
if (keys_version == 0) {
+ SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_VERSIONS);
goto fail;
}
cache->index = (uint16_t)index;
write_u32(cache->module_keys_version, keys_version);
- _Py_SET_OPCODE(*instr, LOAD_GLOBAL_MODULE);
+ _py_set_opcode(instr, LOAD_GLOBAL_MODULE);
goto success;
}
if (!PyDict_CheckExact(builtins)) {
+ SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_DICT);
goto fail;
}
PyDictKeysObject * builtin_keys = ((PyDictObject *)builtins)->ma_keys;
@@ -1130,10 +1143,11 @@ _Py_Specialize_LoadGlobal(
}
index = _PyDictKeys_StringLookup(builtin_keys, name);
if (index == DKIX_ERROR) {
- SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT);
+ SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_EXPECTED_ERROR);
goto fail;
}
if (index != (uint16_t)index) {
+ SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE);
goto fail;
}
uint32_t globals_version = _PyDictKeys_GetVersionForCurrentState(globals_keys);
@@ -1153,12 +1167,12 @@ _Py_Specialize_LoadGlobal(
cache->index = (uint16_t)index;
write_u32(cache->module_keys_version, globals_version);
cache->builtin_keys_version = (uint16_t)builtins_version;
- _Py_SET_OPCODE(*instr, LOAD_GLOBAL_BUILTIN);
+ _py_set_opcode(instr, LOAD_GLOBAL_BUILTIN);
goto success;
fail:
STAT_INC(LOAD_GLOBAL, failure);
assert(!PyErr_Occurred());
- _Py_SET_OPCODE(*instr, LOAD_GLOBAL);
+ _py_set_opcode(instr, LOAD_GLOBAL);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1262,7 +1276,7 @@ _Py_Specialize_BinarySubscr(
PyTypeObject *container_type = Py_TYPE(container);
if (container_type == &PyList_Type) {
if (PyLong_CheckExact(sub)) {
- _Py_SET_OPCODE(*instr, BINARY_SUBSCR_LIST_INT);
+ _py_set_opcode(instr, BINARY_SUBSCR_LIST_INT);
goto success;
}
SPECIALIZATION_FAIL(BINARY_SUBSCR,
@@ -1271,7 +1285,7 @@ _Py_Specialize_BinarySubscr(
}
if (container_type == &PyTuple_Type) {
if (PyLong_CheckExact(sub)) {
- _Py_SET_OPCODE(*instr, BINARY_SUBSCR_TUPLE_INT);
+ _py_set_opcode(instr, BINARY_SUBSCR_TUPLE_INT);
goto success;
}
SPECIALIZATION_FAIL(BINARY_SUBSCR,
@@ -1279,7 +1293,7 @@ _Py_Specialize_BinarySubscr(
goto fail;
}
if (container_type == &PyDict_Type) {
- _Py_SET_OPCODE(*instr, BINARY_SUBSCR_DICT);
+ _py_set_opcode(instr, BINARY_SUBSCR_DICT);
goto success;
}
PyTypeObject *cls = Py_TYPE(container);
@@ -1310,7 +1324,7 @@ _Py_Specialize_BinarySubscr(
}
cache->func_version = version;
((PyHeapTypeObject *)container_type)->_spec_cache.getitem = descriptor;
- _Py_SET_OPCODE(*instr, BINARY_SUBSCR_GETITEM);
+ _py_set_opcode(instr, BINARY_SUBSCR_GETITEM);
goto success;
}
SPECIALIZATION_FAIL(BINARY_SUBSCR,
@@ -1318,7 +1332,7 @@ _Py_Specialize_BinarySubscr(
fail:
STAT_INC(BINARY_SUBSCR, failure);
assert(!PyErr_Occurred());
- _Py_SET_OPCODE(*instr, BINARY_SUBSCR);
+ _py_set_opcode(instr, BINARY_SUBSCR);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1337,7 +1351,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
if ((Py_SIZE(sub) == 0 || Py_SIZE(sub) == 1)
&& ((PyLongObject *)sub)->ob_digit[0] < (size_t)PyList_GET_SIZE(container))
{
- _Py_SET_OPCODE(*instr, STORE_SUBSCR_LIST_INT);
+ _py_set_opcode(instr, STORE_SUBSCR_LIST_INT);
goto success;
}
else {
@@ -1355,7 +1369,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
}
}
if (container_type == &PyDict_Type) {
- _Py_SET_OPCODE(*instr, STORE_SUBSCR_DICT);
+ _py_set_opcode(instr, STORE_SUBSCR_DICT);
goto success;
}
#ifdef Py_STATS
@@ -1422,7 +1436,7 @@ _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *ins
fail:
STAT_INC(STORE_SUBSCR, failure);
assert(!PyErr_Occurred());
- _Py_SET_OPCODE(*instr, STORE_SUBSCR);
+ _py_set_opcode(instr, STORE_SUBSCR);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1444,20 +1458,20 @@ specialize_class_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
int oparg = _Py_OPARG(*instr);
if (nargs == 1 && kwnames == NULL && oparg == 1) {
if (tp == &PyUnicode_Type) {
- _Py_SET_OPCODE(*instr, CALL_NO_KW_STR_1);
+ _py_set_opcode(instr, CALL_NO_KW_STR_1);
return 0;
}
else if (tp == &PyType_Type) {
- _Py_SET_OPCODE(*instr, CALL_NO_KW_TYPE_1);
+ _py_set_opcode(instr, CALL_NO_KW_TYPE_1);
return 0;
}
else if (tp == &PyTuple_Type) {
- _Py_SET_OPCODE(*instr, CALL_NO_KW_TUPLE_1);
+ _py_set_opcode(instr, CALL_NO_KW_TUPLE_1);
return 0;
}
}
if (tp->tp_vectorcall != NULL) {
- _Py_SET_OPCODE(*instr, CALL_BUILTIN_CLASS);
+ _py_set_opcode(instr, CALL_BUILTIN_CLASS);
return 0;
}
SPECIALIZATION_FAIL(CALL, tp == &PyUnicode_Type ?
@@ -1509,7 +1523,7 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr,
SPECIALIZATION_FAIL(CALL, SPEC_FAIL_WRONG_NUMBER_ARGUMENTS);
return -1;
}
- _Py_SET_OPCODE(*instr, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS);
+ _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS);
return 0;
}
case METH_O: {
@@ -1523,18 +1537,18 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr,
bool pop = (_Py_OPCODE(next) == POP_TOP);
int oparg = _Py_OPARG(*instr);
if ((PyObject *)descr == list_append && oparg == 1 && pop) {
- _Py_SET_OPCODE(*instr, CALL_NO_KW_LIST_APPEND);
+ _py_set_opcode(instr, CALL_NO_KW_LIST_APPEND);
return 0;
}
- _Py_SET_OPCODE(*instr, CALL_NO_KW_METHOD_DESCRIPTOR_O);
+ _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_O);
return 0;
}
case METH_FASTCALL: {
- _Py_SET_OPCODE(*instr, CALL_NO_KW_METHOD_DESCRIPTOR_FAST);
+ _py_set_opcode(instr, CALL_NO_KW_METHOD_DESCRIPTOR_FAST);
return 0;
}
case METH_FASTCALL|METH_KEYWORDS: {
- _Py_SET_OPCODE(*instr, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS);
+ _py_set_opcode(instr, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS);
return 0;
}
}
@@ -1585,14 +1599,14 @@ specialize_py_call(PyFunctionObject *func, _Py_CODEUNIT *instr, int nargs,
write_u32(cache->func_version, version);
cache->min_args = min_args;
if (argcount == nargs) {
- _Py_SET_OPCODE(*instr, bound_method ? CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS);
+ _py_set_opcode(instr, bound_method ? CALL_BOUND_METHOD_EXACT_ARGS : CALL_PY_EXACT_ARGS);
}
else if (bound_method) {
SPECIALIZATION_FAIL(CALL, SPEC_FAIL_CALL_BOUND_METHOD);
return -1;
}
else {
- _Py_SET_OPCODE(*instr, CALL_PY_WITH_DEFAULTS);
+ _py_set_opcode(instr, CALL_PY_WITH_DEFAULTS);
}
return 0;
}
@@ -1619,10 +1633,10 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
/* len(o) */
PyInterpreterState *interp = _PyInterpreterState_GET();
if (callable == interp->callable_cache.len) {
- _Py_SET_OPCODE(*instr, CALL_NO_KW_LEN);
+ _py_set_opcode(instr, CALL_NO_KW_LEN);
return 0;
}
- _Py_SET_OPCODE(*instr, CALL_NO_KW_BUILTIN_O);
+ _py_set_opcode(instr, CALL_NO_KW_BUILTIN_O);
return 0;
}
case METH_FASTCALL: {
@@ -1634,15 +1648,15 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
/* isinstance(o1, o2) */
PyInterpreterState *interp = _PyInterpreterState_GET();
if (callable == interp->callable_cache.isinstance) {
- _Py_SET_OPCODE(*instr, CALL_NO_KW_ISINSTANCE);
+ _py_set_opcode(instr, CALL_NO_KW_ISINSTANCE);
return 0;
}
}
- _Py_SET_OPCODE(*instr, CALL_NO_KW_BUILTIN_FAST);
+ _py_set_opcode(instr, CALL_NO_KW_BUILTIN_FAST);
return 0;
}
case METH_FASTCALL | METH_KEYWORDS: {
- _Py_SET_OPCODE(*instr, CALL_BUILTIN_FAST_WITH_KEYWORDS);
+ _py_set_opcode(instr, CALL_BUILTIN_FAST_WITH_KEYWORDS);
return 0;
}
default:
@@ -1735,7 +1749,7 @@ _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs,
if (fail) {
STAT_INC(CALL, failure);
assert(!PyErr_Occurred());
- _Py_SET_OPCODE(*instr, CALL);
+ _py_set_opcode(instr, CALL);
cache->counter = adaptive_counter_backoff(cache->counter);
}
else {
@@ -1832,18 +1846,18 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
bool to_store = (_Py_OPCODE(next) == STORE_FAST ||
_Py_OPCODE(next) == STORE_FAST__LOAD_FAST);
if (to_store && locals[_Py_OPARG(next)] == lhs) {
- _Py_SET_OPCODE(*instr, BINARY_OP_INPLACE_ADD_UNICODE);
+ _py_set_opcode(instr, BINARY_OP_INPLACE_ADD_UNICODE);
goto success;
}
- _Py_SET_OPCODE(*instr, BINARY_OP_ADD_UNICODE);
+ _py_set_opcode(instr, BINARY_OP_ADD_UNICODE);
goto success;
}
if (PyLong_CheckExact(lhs)) {
- _Py_SET_OPCODE(*instr, BINARY_OP_ADD_INT);
+ _py_set_opcode(instr, BINARY_OP_ADD_INT);
goto success;
}
if (PyFloat_CheckExact(lhs)) {
- _Py_SET_OPCODE(*instr, BINARY_OP_ADD_FLOAT);
+ _py_set_opcode(instr, BINARY_OP_ADD_FLOAT);
goto success;
}
break;
@@ -1853,11 +1867,11 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
break;
}
if (PyLong_CheckExact(lhs)) {
- _Py_SET_OPCODE(*instr, BINARY_OP_MULTIPLY_INT);
+ _py_set_opcode(instr, BINARY_OP_MULTIPLY_INT);
goto success;
}
if (PyFloat_CheckExact(lhs)) {
- _Py_SET_OPCODE(*instr, BINARY_OP_MULTIPLY_FLOAT);
+ _py_set_opcode(instr, BINARY_OP_MULTIPLY_FLOAT);
goto success;
}
break;
@@ -1867,18 +1881,18 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
break;
}
if (PyLong_CheckExact(lhs)) {
- _Py_SET_OPCODE(*instr, BINARY_OP_SUBTRACT_INT);
+ _py_set_opcode(instr, BINARY_OP_SUBTRACT_INT);
goto success;
}
if (PyFloat_CheckExact(lhs)) {
- _Py_SET_OPCODE(*instr, BINARY_OP_SUBTRACT_FLOAT);
+ _py_set_opcode(instr, BINARY_OP_SUBTRACT_FLOAT);
goto success;
}
break;
}
SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs));
STAT_INC(BINARY_OP, failure);
- _Py_SET_OPCODE(*instr, BINARY_OP);
+ _py_set_opcode(instr, BINARY_OP);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -1960,13 +1974,13 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
goto failure;
}
if (PyFloat_CheckExact(lhs)) {
- _Py_SET_OPCODE(*instr, COMPARE_OP_FLOAT_JUMP);
+ _py_set_opcode(instr, COMPARE_OP_FLOAT_JUMP);
cache->mask = when_to_jump_mask;
goto success;
}
if (PyLong_CheckExact(lhs)) {
if (Py_ABS(Py_SIZE(lhs)) <= 1 && Py_ABS(Py_SIZE(rhs)) <= 1) {
- _Py_SET_OPCODE(*instr, COMPARE_OP_INT_JUMP);
+ _py_set_opcode(instr, COMPARE_OP_INT_JUMP);
cache->mask = when_to_jump_mask;
goto success;
}
@@ -1981,7 +1995,7 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
goto failure;
}
else {
- _Py_SET_OPCODE(*instr, COMPARE_OP_STR_JUMP);
+ _py_set_opcode(instr, COMPARE_OP_STR_JUMP);
cache->mask = (when_to_jump_mask & 2) == 0;
goto success;
}
@@ -1989,7 +2003,7 @@ _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
SPECIALIZATION_FAIL(COMPARE_OP, compare_op_fail_kind(lhs, rhs));
failure:
STAT_INC(COMPARE_OP, failure);
- _Py_SET_OPCODE(*instr, COMPARE_OP);
+ _py_set_opcode(instr, COMPARE_OP);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -2023,10 +2037,10 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg)
goto failure;
}
if (PyTuple_GET_SIZE(seq) == 2) {
- _Py_SET_OPCODE(*instr, UNPACK_SEQUENCE_TWO_TUPLE);
+ _py_set_opcode(instr, UNPACK_SEQUENCE_TWO_TUPLE);
goto success;
}
- _Py_SET_OPCODE(*instr, UNPACK_SEQUENCE_TUPLE);
+ _py_set_opcode(instr, UNPACK_SEQUENCE_TUPLE);
goto success;
}
if (PyList_CheckExact(seq)) {
@@ -2034,13 +2048,13 @@ _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg)
SPECIALIZATION_FAIL(UNPACK_SEQUENCE, SPEC_FAIL_EXPECTED_ERROR);
goto failure;
}
- _Py_SET_OPCODE(*instr, UNPACK_SEQUENCE_LIST);
+ _py_set_opcode(instr, UNPACK_SEQUENCE_LIST);
goto success;
}
SPECIALIZATION_FAIL(UNPACK_SEQUENCE, unpack_sequence_fail_kind(seq));
failure:
STAT_INC(UNPACK_SEQUENCE, failure);
- _Py_SET_OPCODE(*instr, UNPACK_SEQUENCE);
+ _py_set_opcode(instr, UNPACK_SEQUENCE);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
@@ -2129,22 +2143,26 @@ _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg)
_Py_CODEUNIT next = instr[1+INLINE_CACHE_ENTRIES_FOR_ITER];
int next_op = _PyOpcode_Deopt[_Py_OPCODE(next)];
if (tp == &PyListIter_Type) {
- _Py_SET_OPCODE(*instr, FOR_ITER_LIST);
+ _py_set_opcode(instr, FOR_ITER_LIST);
+ goto success;
+ }
+ else if (tp == &PyTupleIter_Type) {
+ _py_set_opcode(instr, FOR_ITER_TUPLE);
goto success;
}
else if (tp == &PyRangeIter_Type && next_op == STORE_FAST) {
- _Py_SET_OPCODE(*instr, FOR_ITER_RANGE);
+ _py_set_opcode(instr, FOR_ITER_RANGE);
goto success;
}
else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
assert(_Py_OPCODE(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1]) == END_FOR);
- _Py_SET_OPCODE(*instr, FOR_ITER_GEN);
+ _py_set_opcode(instr, FOR_ITER_GEN);
goto success;
}
SPECIALIZATION_FAIL(FOR_ITER,
_PySpecialization_ClassifyIterator(iter));
STAT_INC(FOR_ITER, failure);
- _Py_SET_OPCODE(*instr, FOR_ITER);
+ _py_set_opcode(instr, FOR_ITER);
cache->counter = adaptive_counter_backoff(cache->counter);
return;
success:
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 88f806e616f27e..91f5c487c98fe3 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -950,10 +950,6 @@ static int
profile_trampoline(PyObject *self, PyFrameObject *frame,
int what, PyObject *arg)
{
- if (arg == NULL) {
- arg = Py_None;
- }
-
PyThreadState *tstate = _PyThreadState_GET();
PyObject *result = call_trampoline(tstate, self, frame, what, arg);
if (result == NULL) {
diff --git a/Python/thread.c b/Python/thread.c
index 3c1e78ed1bca83..4581f1af043a37 100644
--- a/Python/thread.c
+++ b/Python/thread.c
@@ -8,15 +8,7 @@
#include "Python.h"
#include "pycore_pystate.h" // _PyInterpreterState_GET()
#include "pycore_structseq.h" // _PyStructSequence_FiniType()
-
-#ifndef _POSIX_THREADS
-/* This means pthreads are not implemented in libc headers, hence the macro
- not present in unistd.h. But they still can be implemented as an external
- library (e.g. gnu pth in pthread emulation) */
-# ifdef HAVE_PTHREAD_H
-# include <pthread.h> /* _POSIX_THREADS */
-# endif
-#endif
+#include "pycore_pythread.h"
#ifndef DONT_HAVE_STDIO_H
#include <stdio.h>
@@ -24,33 +16,17 @@
#include
-#ifndef _POSIX_THREADS
-
-/* Check if we're running on HP-UX and _SC_THREADS is defined. If so, then
- enough of the Posix threads package is implemented to support python
- threads.
-
- This is valid for HP-UX 11.23 running on an ia64 system. If needed, add
- a check of __ia64 to verify that we're running on an ia64 system instead
- of a pa-risc system.
-*/
-#ifdef __hpux
-#ifdef _SC_THREADS
-#define _POSIX_THREADS
-#endif
-#endif
-
-#endif /* _POSIX_THREADS */
-
-static int initialized;
static void PyThread__init_thread(void); /* Forward */
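+// The "initialized" flag now lives in the consolidated runtime state
+// (_PyRuntime.threads) rather than in a file-local static.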
+#define initialized _PyRuntime.threads.initialized
+
void
PyThread_init_thread(void)
{
- if (initialized)
+ if (initialized) {
return;
+ }
initialized = 1;
PyThread__init_thread();
}
@@ -58,7 +34,7 @@ PyThread_init_thread(void)
#if defined(HAVE_PTHREAD_STUBS)
# define PYTHREAD_NAME "pthread-stubs"
# include "thread_pthread_stubs.h"
-#elif defined(_POSIX_THREADS)
+#elif defined(_USE_PTHREADS) /* AKA _PTHREADS */
# if defined(__EMSCRIPTEN__) && !defined(__EMSCRIPTEN_PTHREADS__)
# define PYTHREAD_NAME "pthread-stubs"
# else
diff --git a/Python/thread_nt.h b/Python/thread_nt.h
index d1f1323948a6c6..26f441bd6d3c56 100644
--- a/Python/thread_nt.h
+++ b/Python/thread_nt.h
@@ -152,11 +152,12 @@ unsigned long PyThread_get_thread_native_id(void);
#endif
/*
- * Initialization of the C package, should not be needed.
+ * Initialization for the current runtime.
*/
static void
PyThread__init_thread(void)
{
+ // Initialization of the C package should not be needed.
}
/*
diff --git a/Python/thread_pthread.h b/Python/thread_pthread.h
index 1c5b320813af83..76d6f3bcdf9c40 100644
--- a/Python/thread_pthread.h
+++ b/Python/thread_pthread.h
@@ -119,24 +119,21 @@
* pthread_cond support
*/
-#if defined(HAVE_PTHREAD_CONDATTR_SETCLOCK) && defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_MONOTONIC)
-// monotonic is supported statically. It doesn't mean it works on runtime.
-#define CONDATTR_MONOTONIC
-#endif
-
-// NULL when pthread_condattr_setclock(CLOCK_MONOTONIC) is not supported.
-static pthread_condattr_t *condattr_monotonic = NULL;
+#define condattr_monotonic _PyRuntime.threads._condattr_monotonic.ptr
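+// As before, this is NULL when pthread_condattr_setclock(CLOCK_MONOTONIC) is
+// not supported; the storage now lives in _PyRuntime.threads._condattr_monotonic.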
static void
init_condattr(void)
{
#ifdef CONDATTR_MONOTONIC
- static pthread_condattr_t ca;
+# define ca _PyRuntime.threads._condattr_monotonic.val
+ // XXX We need to check the return code?
pthread_condattr_init(&ca);
+ // XXX We need to run pthread_condattr_destroy() during runtime fini.
if (pthread_condattr_setclock(&ca, CLOCK_MONOTONIC) == 0) {
condattr_monotonic = &ca; // Use monotonic clock
}
-#endif
+# undef ca
+#endif // CONDATTR_MONOTONIC
}
int
@@ -192,15 +189,21 @@ typedef struct {
"%s: %s\n", name, strerror(status)); error = 1; }
/*
- * Initialization.
+ * Initialization for the current runtime.
*/
static void
PyThread__init_thread(void)
{
+ // The library is only initialized once in the process,
+ // regardless of how many times the Python runtime is initialized.
+ static int lib_initialized = 0;
+ if (!lib_initialized) {
+ lib_initialized = 1;
#if defined(_AIX) && defined(__GNUC__)
- extern void pthread_init(void);
- pthread_init();
+ extern void pthread_init(void);
+ pthread_init();
#endif
+ }
init_condattr();
}
diff --git a/Python/thread_pthread_stubs.h b/Python/thread_pthread_stubs.h
index 8b80c0f87e2509..56e5b6141924b4 100644
--- a/Python/thread_pthread_stubs.h
+++ b/Python/thread_pthread_stubs.h
@@ -124,13 +124,10 @@ pthread_attr_destroy(pthread_attr_t *attr)
return 0;
}
-// pthread_key
-typedef struct {
- bool in_use;
- void *value;
-} py_tls_entry;
-static py_tls_entry py_tls_entries[PTHREAD_KEYS_MAX] = {0};
+typedef struct py_stub_tls_entry py_tls_entry;
+
+#define py_tls_entries (_PyRuntime.threads.stubs.tls_entries)
int
pthread_key_create(pthread_key_t *key, void (*destr_function)(void *))
diff --git a/README.rst b/README.rst
index 1fa019473643d9..ab9d3a6ea71cf6 100644
--- a/README.rst
+++ b/README.rst
@@ -1,4 +1,4 @@
-This is Python version 3.12.0 alpha 2
+This is Python version 3.12.0 alpha 3
=====================================
.. image:: https://github.com/python/cpython/workflows/Tests/badge.svg
diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py
index 2eef649437a680..7f4e24280133f2 100644
--- a/Tools/build/deepfreeze.py
+++ b/Tools/build/deepfreeze.py
@@ -44,6 +44,7 @@ def make_string_literal(b: bytes) -> str:
CO_FAST_CELL = 0x40
CO_FAST_FREE = 0x80
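+# Monotonically increasing counter used to give each deep-frozen code object a
+# unique co_version; the final value is written out as _Py_next_func_version.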
+next_code_version = 1
def get_localsplus(code: types.CodeType):
a = collections.defaultdict(int)
@@ -227,6 +228,7 @@ def generate_unicode(self, name: str, s: str) -> str:
def generate_code(self, name: str, code: types.CodeType) -> str:
+ global next_code_version
# The ordering here matches PyCode_NewWithPosOnlyArgs()
# (but see below).
co_consts = self.generate(name + "_consts", code.co_consts)
@@ -268,6 +270,8 @@ def generate_code(self, name: str, code: types.CodeType) -> str:
self.write(f".co_nplaincellvars = {nplaincellvars},")
self.write(f".co_ncellvars = {ncellvars},")
self.write(f".co_nfreevars = {nfreevars},")
+ self.write(f".co_version = {next_code_version},")
+ next_code_version += 1
self.write(f".co_localsplusnames = {co_localsplusnames},")
self.write(f".co_localspluskinds = {co_localspluskinds},")
self.write(f".co_filename = {co_filename},")
@@ -461,6 +465,7 @@ def generate(args: list[str], output: TextIO) -> None:
with printer.block(f"if ({p} < 0)"):
printer.write("return -1;")
printer.write("return 0;")
+ printer.write(f"\nuint32_t _Py_next_func_version = {next_code_version};\n")
if verbose:
print(f"Cache hits: {printer.hits}, misses: {printer.misses}")
diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv
index cc465134a9e065..479221cbd4b682 100644
--- a/Tools/c-analyzer/cpython/globals-to-fix.tsv
+++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv
@@ -4,10 +4,10 @@ filename funcname name reason
# These are all variables that we will be making non-global.
##################################
-# global objects to fix in core code
+## global objects to fix in core code
-#-----------------------
-# exported builtin types (C-API)
+##-----------------------
+## exported builtin types (C-API)
Objects/boolobject.c - PyBool_Type -
Objects/bytearrayobject.c - PyByteArrayIter_Type -
@@ -102,8 +102,8 @@ Python/context.c - PyContextVar_Type -
Python/context.c - PyContext_Type -
Python/traceback.c - PyTraceBack_Type -
-#-----------------------
-# other exported builtin types
+##-----------------------
+## other exported builtin types
# Not in a .h file:
Objects/codeobject.c - _PyLineIterator -
@@ -126,8 +126,8 @@ Python/hamt.c - _PyHamt_CollisionNode_Type -
Python/hamt.c - _PyHamt_Type -
Python/symtable.c - PySTEntry_Type -
-#-----------------------
-# private static builtin types
+##-----------------------
+## private static builtin types
Objects/setobject.c - _PySetDummy_Type -
Objects/stringlib/unicode_format.h - PyFormatterIter_Type -
@@ -136,8 +136,8 @@ Objects/unicodeobject.c - EncodingMapType -
#Objects/unicodeobject.c - PyFieldNameIter_Type -
#Objects/unicodeobject.c - PyFormatterIter_Type -
-#-----------------------
-# static builtin structseq
+##-----------------------
+## static builtin structseq
Objects/floatobject.c - FloatInfoType -
Objects/longobject.c - Int_InfoType -
@@ -148,8 +148,8 @@ Python/sysmodule.c - Hash_InfoType -
Python/sysmodule.c - VersionInfoType -
Python/thread.c - ThreadInfoType -
-#-----------------------
-# builtin exception types
+##-----------------------
+## builtin exception types
Objects/exceptions.c - _PyExc_BaseException -
Objects/exceptions.c - _PyExc_BaseExceptionGroup -
@@ -286,8 +286,8 @@ Objects/exceptions.c - PyExc_BytesWarning -
Objects/exceptions.c - PyExc_ResourceWarning -
Objects/exceptions.c - PyExc_EncodingWarning -
-#-----------------------
-# singletons
+##-----------------------
+## singletons
Objects/boolobject.c - _Py_FalseStruct -
Objects/boolobject.c - _Py_TrueStruct -
@@ -300,39 +300,16 @@ Objects/sliceobject.c - _Py_EllipsisObject -
##################################
-# global non-objects to fix in core code
+## global non-objects to fix in core code
-#-----------------------
-# effectively-const but initialized lazily
-
-# idempotent
-Python/dtoa.c - p5s -
-Objects/obmalloc.c new_arena debug_stats -
-
-# others
-Python/perf_trampoline.c - perf_map_file -
-Objects/unicodeobject.c - ucnhash_capi -
-
-#-----------------------
-# state
-
-# local buffer
-Python/suggestions.c levenshtein_distance buffer -
-
-# other
-Objects/object.c - _Py_RefTotal -
-Python/perf_trampoline.c - perf_status -
-Python/perf_trampoline.c - extra_code_index -
-Python/perf_trampoline.c - code_arena -
-Python/perf_trampoline.c - trampoline_api -
-Python/thread_pthread_stubs.h - py_tls_entries -
+#
##################################
-# global objects to fix in builtin modules
+## global objects to fix in builtin modules
-#-----------------------
-# static types
+##-----------------------
+## static types
Modules/_collectionsmodule.c - defdict_type -
Modules/_collectionsmodule.c - deque_type -
@@ -381,57 +358,18 @@ Modules/itertoolsmodule.c - tee_type -
Modules/itertoolsmodule.c - teedataobject_type -
Modules/itertoolsmodule.c - ziplongest_type -
-#-----------------------
-# other
-
-# state
-Modules/faulthandler.c - fatal_error -
-Modules/faulthandler.c - thread -
-Modules/faulthandler.c - user_signals -
-Modules/faulthandler.c - stack -
-Modules/faulthandler.c - old_stack -
-
##################################
-# global non-objects to fix in builtin modules
-
-#-----------------------
-# initialized once
-
-Modules/_io/bufferedio.c _PyIO_trap_eintr eintr_int -
-Modules/posixmodule.c os_dup2_impl dup3_works -
-Modules/posixmodule.c - structseq_new -
-Modules/posixmodule.c - ticks_per_second -
-Modules/timemodule.c _PyTime_GetClockWithInfo initialized -
-Modules/timemodule.c _PyTime_GetProcessTimeWithInfo ticks_per_second -
-
-#-----------------------
-# state
-
-Modules/_tracemalloc.c - allocators -
-Modules/_tracemalloc.c - tables_lock -
-Modules/_tracemalloc.c - tracemalloc_empty_traceback -
-Modules/_tracemalloc.c - tracemalloc_traced_memory -
-Modules/_tracemalloc.c - tracemalloc_peak_traced_memory -
-Modules/_tracemalloc.c - tracemalloc_filenames -
-Modules/_tracemalloc.c - tracemalloc_traceback -
-Modules/_tracemalloc.c - tracemalloc_tracebacks -
-Modules/_tracemalloc.c - tracemalloc_traces -
-Modules/_tracemalloc.c - tracemalloc_domains -
-Modules/_tracemalloc.c - tracemalloc_reentrant_key -
-Modules/faulthandler.c faulthandler_dump_traceback reentrant -
-Modules/posixmodule.c - environ -
-Modules/signalmodule.c - is_tripped -
-Modules/signalmodule.c - signal_global_state -
-Modules/signalmodule.c - wakeup -
-Modules/signalmodule.c - Handlers -
+## global non-objects to fix in builtin modules
+
+#
##################################
-# global objects to fix in extension modules
+## global objects to fix in extension modules
-#-----------------------
-# static types
+##-----------------------
+## static types
Modules/_asynciomodule.c - FutureIterType -
Modules/_asynciomodule.c - FutureType -
@@ -496,7 +434,6 @@ Modules/_pickle.c - PicklerMemoProxyType -
Modules/_pickle.c - Pickler_Type -
Modules/_pickle.c - UnpicklerMemoProxyType -
Modules/_pickle.c - Unpickler_Type -
-Modules/_xxsubinterpretersmodule.c - ChannelIDtype -
Modules/_zoneinfo.c - PyZoneInfo_ZoneInfoType -
Modules/ossaudiodev.c - OSSAudioType -
Modules/ossaudiodev.c - OSSMixerType -
@@ -507,10 +444,10 @@ Modules/xxmodule.c - Xxo_Type -
Modules/xxsubtype.c - spamdict_type -
Modules/xxsubtype.c - spamlist_type -
-#-----------------------
-# non-static types - initialized once
+##-----------------------
+## non-static types - initialized once
-# heap types
+## heap types
Modules/_decimal/_decimal.c - DecimalTuple -
Modules/_decimal/_decimal.c - PyDecSignalDict_Type -
Modules/_tkinter.c - PyTclObject_Type -
@@ -518,32 +455,26 @@ Modules/_tkinter.c - Tkapp_Type -
Modules/_tkinter.c - Tktt_Type -
Modules/xxlimited_35.c - Xxo_Type -
-# exception types
+## exception types
Modules/_ctypes/_ctypes.c - PyExc_ArgError -
Modules/_cursesmodule.c - PyCursesError -
Modules/_decimal/_decimal.c - DecimalException -
Modules/_tkinter.c - Tkinter_TclError -
-Modules/_xxsubinterpretersmodule.c - ChannelError -
-Modules/_xxsubinterpretersmodule.c - ChannelNotFoundError -
-Modules/_xxsubinterpretersmodule.c - ChannelClosedError -
-Modules/_xxsubinterpretersmodule.c - ChannelEmptyError -
-Modules/_xxsubinterpretersmodule.c - ChannelNotEmptyError -
-Modules/_xxsubinterpretersmodule.c - RunFailedError -
Modules/ossaudiodev.c - OSSAudioError -
Modules/socketmodule.c - socket_herror -
Modules/socketmodule.c - socket_gaierror -
Modules/xxlimited_35.c - ErrorObject -
Modules/xxmodule.c - ErrorObject -
-#-----------------------
-# cached - initialized once
+##-----------------------
+## cached - initialized once
-# manually cached PyUnicodeOjbect
+## manually cached PyUnicodeObject
Modules/_asynciomodule.c - context_kwname -
Modules/_ctypes/callproc.c _ctypes_get_errobj error_object_name -
Modules/_ctypes/_ctypes.c CreateSwappedType suffix -
-# other - during module init
+## other - during module init
Modules/_asynciomodule.c - asyncio_mod -
Modules/_asynciomodule.c - traceback_extract_stack -
Modules/_asynciomodule.c - asyncio_future_repr_func -
@@ -558,10 +489,10 @@ Modules/_zoneinfo.c - io_open -
Modules/_zoneinfo.c - _tzpath_find_tzfile -
Modules/_zoneinfo.c - _common_mod -
-#-----------------------
-# other
+##-----------------------
+## other
-# initialized once
+## initialized once
Modules/_ctypes/_ctypes.c - _unpickle -
Modules/_ctypes/_ctypes.c PyCArrayType_from_ctype cache -
Modules/_cursesmodule.c - ModDict -
@@ -584,7 +515,7 @@ Modules/_decimal/_decimal.c - Rational -
Modules/_decimal/_decimal.c - SignalTuple -
Modules/arraymodule.c array_array___reduce_ex___impl array_reconstructor -
-# state
+## state
Modules/_asynciomodule.c - cached_running_holder -
Modules/_asynciomodule.c - fi_freelist -
Modules/_asynciomodule.c - fi_freelist_len -
@@ -599,20 +530,19 @@ Modules/_tkinter.c - valInCmd -
Modules/_tkinter.c - trbInCmd -
Modules/_zoneinfo.c - TIMEDELTA_CACHE -
Modules/_zoneinfo.c - ZONEINFO_WEAK_CACHE -
-Modules/syslogmodule.c - S_ident_o -
##################################
-# global non-objects to fix in extension modules
+## global non-objects to fix in extension modules
-#-----------------------
-# initialized once
+##-----------------------
+## initialized once
-# pre-allocated buffer
+## pre-allocated buffer
Modules/nismodule.c nisproc_maplist_2 res -
Modules/pyexpat.c PyUnknownEncodingHandler template_buffer -
-# other
+## other
Include/datetime.h - PyDateTimeAPI -
Modules/_asynciomodule.c - module_initialized -
Modules/_ctypes/cfield.c _ctypes_get_fielddesc initialized -
@@ -657,8 +587,8 @@ Modules/readline.c - libedit_history_start -
Modules/socketmodule.c - accept4_works -
Modules/socketmodule.c - sock_cloexec_works -
-#-----------------------
-# state
+##-----------------------
+## state
Modules/_asynciomodule.c - cached_running_holder_tsid -
Modules/_asynciomodule.c - task_name_counter -
@@ -688,4 +618,3 @@ Modules/readline.c - completed_input_string -
Modules/rotatingtree.c - random_stream -
Modules/rotatingtree.c - random_value -
Modules/socketmodule.c - defaulttimeout -
-Modules/syslogmodule.c - S_log_open -
diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv
index 242deace8c945d..c71fc0d958216c 100644
--- a/Tools/c-analyzer/cpython/ignored.tsv
+++ b/Tools/c-analyzer/cpython/ignored.tsv
@@ -5,52 +5,62 @@ filename funcname name reason
##################################
-# process-global resources
+## process-global values - set once
-# Initialization for these should be idempotent.
+# These will never re-initialize (but would be idempotent).
+# These are effectively const.
-#-----------------------
-# effectively const, set once before/during first init
+##-----------------------
+## process-global resources
-Modules/getbuildinfo.c - buildinfo -
-Modules/getbuildinfo.c - initialized -
-Python/getversion.c - initialized -
-Python/getversion.c - version -
-
-#-----------------------
-# effectively const, set once during first init
-
-Python/bootstrap_hash.c - _Py_HashSecret_Initialized -
-Python/pyhash.c - _Py_HashSecret -
-Python/thread.c - initialized -
-Python/thread_pthread.h - condattr_monotonic -
-
-# safe static buffer used during one-time initialization
-Python/thread_pthread.h init_condattr ca -
-
-# indicators for process-global resource availability/capability
+## indicators for resource availability/capability
+# (set during first init)
Python/bootstrap_hash.c py_getrandom getrandom_works -
Python/fileutils.c - _Py_open_cloexec_works -
Python/fileutils.c set_inheritable ioctl_works -
+# (set lazily, *after* first init)
+# XXX Is this thread-safe?
+Modules/posixmodule.c os_dup2_impl dup3_works -
-#-----------------------
-# effectively const but set once lazily (*after* first init)
+## guards around resource init
+Python/thread_pthread.h PyThread__init_thread lib_initialized -
+##-----------------------
+## other values (not Python-specific)
+
+## cached computed data - set lazily (*after* first init)
+# XXX Are these safe relative to write races?
Objects/longobject.c long_from_non_binary_base log_base_BASE -
Objects/longobject.c long_from_non_binary_base convwidth_base -
Objects/longobject.c long_from_non_binary_base convmultmax_base -
Objects/unicodeobject.c - bloom_linebreak -
+# This is safe:
Objects/unicodeobject.c _init_global_state initialized -
-# XXX Move to _PyRuntimeState?
+##-----------------------
+## other values (Python-specific)
+
+## internal state - set before/during first init
+Modules/getbuildinfo.c - buildinfo -
+Modules/getbuildinfo.c - initialized -
+Python/getversion.c - initialized -
+Python/getversion.c - version -
+
+## public C-API - set during first init
+Python/bootstrap_hash.c - _Py_HashSecret_Initialized -
+Python/pyhash.c - _Py_HashSecret -
+
+## internal state - set lazily (*after* first init)
+# XXX Move to _PyRuntimeState (i.e. tie to init/fini cycle)?
Parser/action_helpers.c _PyPegen_dummy_name cache -
##################################
-# state tied to C main() (only in main thread)
+## state tied to Py_Main()
+# (only in main thread)
-#-----------------------
-# handling C argv
+##-----------------------
+## handling C argv
Python/getopt.c - _PyOS_optarg -
Python/getopt.c - _PyOS_opterr -
@@ -58,8 +68,8 @@ Python/getopt.c - _PyOS_optind -
Python/getopt.c - opt_ptr -
Python/pathconfig.c - _Py_path_config -
-#-----------------------
-# REPL
+##-----------------------
+## REPL
Parser/myreadline.c - _PyOS_ReadlineLock -
Parser/myreadline.c - _PyOS_ReadlineTState -
@@ -68,28 +78,27 @@ Parser/myreadline.c - PyOS_ReadlineFunctionPointer -
##################################
-# state tied to each runtime init/fini cycle
+## runtime-global values - set once with each init
-Python/pylifecycle.c - _PyRuntime -
-Python/pylifecycle.c - runtime_initialized -
+# These are effectively const.
-# All uses of _PyArg_Parser are handled in c-analyzr/cpython/_analyzer.py.
+##-----------------------
+## set by embedders before init
+# (whether directly or through a call)
-#-----------------------
-# effectively const once init finishes
-
-# set by embedders before init (whether directly or through a call)
Python/initconfig.c - _Py_StandardStreamEncoding -
Python/initconfig.c - _Py_StandardStreamErrors -
-Python/initconfig.c - orig_argv -
-# deprecated
+##-----------------------
+## public C-API
+
+## deprecated
Python/preconfig.c - Py_FileSystemDefaultEncoding -
Python/preconfig.c - Py_HasFileSystemDefaultEncoding -
Python/preconfig.c - Py_FileSystemDefaultEncodeErrors -
Python/preconfig.c - _Py_HasFileSystemDefaultEncodeErrors -
-# legacy config flags
+## legacy config flags
Python/initconfig.c - Py_UTF8Mode -
Python/initconfig.c - Py_DebugFlag -
Python/initconfig.c - Py_VerboseFlag -
@@ -109,373 +118,68 @@ Python/initconfig.c - Py_IsolatedFlag -
Python/initconfig.c - Py_LegacyWindowsFSEncodingFlag -
Python/initconfig.c - Py_LegacyWindowsStdioFlag -
-# initialized statically, customized by embedders
+##-----------------------
+## initialized statically, may be customized by embedders
+
Python/frozen.c - PyImport_FrozenModules -
Python/import.c - inittab_copy -
Python/import.c - PyImport_Inittab -
-# used temporarily during init
-Python/sysmodule.c - _preinit_warnoptions -
-Python/sysmodule.c - _preinit_xoptions -
-
##################################
-# special-use diagnistic state
+## runtime-global state
-Parser/pegen.c - memo_statistics -
+##-----------------------
+## tied to each init/fini cycle
+## the consolidated runtime state
+Python/pylifecycle.c - _PyRuntime -
+Python/pylifecycle.c - runtime_initialized -
-##################################
-# one-off temporary state
+# All uses of _PyArg_Parser are handled in c-analyzer/cpython/_analyzer.py.
-# This is safe enough.
-Python/pylifecycle.c _Py_FatalErrorFormat reentrant -
-Python/pylifecycle.c fatal_error reentrant -
+## main interp state in stdlib modules
+Modules/syslogmodule.c - S_ident_o -
+Modules/syslogmodule.c - S_log_open -
+##-----------------------
+## kept for stable ABI compatibility
-##################################
-# not used (kept for compatibility)
+# XXX should be per-interpreter, without impacting stable ABI extensions
+Objects/object.c - _Py_RefTotal -
-Python/pyfpe.c - PyFPE_counter -
+##-----------------------
+## one-off temporary state
+
+# used during runtime init
+Python/sysmodule.c - _preinit_warnoptions -
+Python/sysmodule.c - _preinit_xoptions -
+
+# thread-safety
+# XXX need race protection?
+Modules/faulthandler.c faulthandler_dump_traceback reentrant -
+Python/pylifecycle.c _Py_FatalErrorFormat reentrant -
+Python/pylifecycle.c fatal_error reentrant -
##################################
-# The analyzer should have ignored these.
-# XXX Fix the analyzer.
+## not significant
-Modules/_io/_iomodule.c - _PyIO_Module -
-Modules/_sqlite/module.c - _sqlite3module -
+##-----------------------
+## not used (kept for compatibility)
-# forward/extern references
+Python/pyfpe.c - PyFPE_counter -
-Include/py_curses.h - PyCurses_API -
-Include/pydecimal.h - _decimal_api -
-Modules/_blake2/blake2module.c - blake2b_type_spec -
-Modules/_blake2/blake2module.c - blake2s_type_spec -
-Modules/_io/fileio.c - _Py_open_cloexec_works -
-Modules/_io/_iomodule.h - PyIOBase_Type -
-Modules/_io/_iomodule.h - PyRawIOBase_Type -
-Modules/_io/_iomodule.h - PyBufferedIOBase_Type -
-Modules/_io/_iomodule.h - PyTextIOBase_Type -
-Modules/_io/_iomodule.h - PyFileIO_Type -
-Modules/_io/_iomodule.h - PyBytesIO_Type -
-Modules/_io/_iomodule.h - PyStringIO_Type -
-Modules/_io/_iomodule.h - PyBufferedReader_Type -
-Modules/_io/_iomodule.h - PyBufferedWriter_Type -
-Modules/_io/_iomodule.h - PyBufferedRWPair_Type -
-Modules/_io/_iomodule.h - PyBufferedRandom_Type -
-Modules/_io/_iomodule.h - PyTextIOWrapper_Type -
-Modules/_io/_iomodule.h - PyIncrementalNewlineDecoder_Type -
-Modules/_io/_iomodule.h - _PyBytesIOBuffer_Type -
-Modules/_io/_iomodule.h - _PyIO_Module -
-Modules/_io/_iomodule.h - _PyIO_str_close -
-Modules/_io/_iomodule.h - _PyIO_str_closed -
-Modules/_io/_iomodule.h - _PyIO_str_decode -
-Modules/_io/_iomodule.h - _PyIO_str_encode -
-Modules/_io/_iomodule.h - _PyIO_str_fileno -
-Modules/_io/_iomodule.h - _PyIO_str_flush -
-Modules/_io/_iomodule.h - _PyIO_str_getstate -
-Modules/_io/_iomodule.h - _PyIO_str_isatty -
-Modules/_io/_iomodule.h - _PyIO_str_newlines -
-Modules/_io/_iomodule.h - _PyIO_str_nl -
-Modules/_io/_iomodule.h - _PyIO_str_peek -
-Modules/_io/_iomodule.h - _PyIO_str_read -
-Modules/_io/_iomodule.h - _PyIO_str_read1 -
-Modules/_io/_iomodule.h - _PyIO_str_readable -
-Modules/_io/_iomodule.h - _PyIO_str_readall -
-Modules/_io/_iomodule.h - _PyIO_str_readinto -
-Modules/_io/_iomodule.h - _PyIO_str_readline -
-Modules/_io/_iomodule.h - _PyIO_str_reset -
-Modules/_io/_iomodule.h - _PyIO_str_seek -
-Modules/_io/_iomodule.h - _PyIO_str_seekable -
-Modules/_io/_iomodule.h - _PyIO_str_setstate -
-Modules/_io/_iomodule.h - _PyIO_str_tell -
-Modules/_io/_iomodule.h - _PyIO_str_truncate -
-Modules/_io/_iomodule.h - _PyIO_str_writable -
-Modules/_io/_iomodule.h - _PyIO_str_write -
-Modules/_io/_iomodule.h - _PyIO_empty_str -
-Modules/_io/_iomodule.h - _PyIO_empty_bytes -
-Modules/_multiprocessing/multiprocessing.h - _PyMp_SemLockType -
-Modules/_sqlite/module.c - _pysqlite_converters -
-Modules/_sqlite/module.c - _pysqlite_enable_callback_tracebacks -
-Modules/_sqlite/module.c - pysqlite_BaseTypeAdapted -
-Modules/_sqlite/module.h - pysqlite_global_state -
-Modules/_testcapimodule.c - _PyBytesIOBuffer_Type -
-Modules/posixmodule.c - _Py_open_cloexec_works -
-Objects/object.c - _Py_GenericAliasIterType -
-Objects/object.c - _PyMemoryIter_Type -
-Objects/object.c - _PyLineIterator -
-Objects/object.c - _PyPositionsIterator -
-Python/perf_trampoline.c - _Py_trampoline_func_start -
-Python/perf_trampoline.c - _Py_trampoline_func_end -
-Python/importdl.h - _PyImport_DynLoadFiletab -
+##-----------------------
+## should be const
+# XXX Make them const.
-Modules/expat/xmlrole.c - prolog0 -
-Modules/expat/xmlrole.c - prolog1 -
-Modules/expat/xmlrole.c - prolog2 -
-Modules/expat/xmlrole.c - doctype0 -
-Modules/expat/xmlrole.c - doctype1 -
-Modules/expat/xmlrole.c - doctype2 -
-Modules/expat/xmlrole.c - doctype3 -
-Modules/expat/xmlrole.c - doctype4 -
-Modules/expat/xmlrole.c - doctype5 -
-Modules/expat/xmlrole.c - internalSubset -
-Modules/expat/xmlrole.c - entity0 -
-Modules/expat/xmlrole.c - entity1 -
-Modules/expat/xmlrole.c - entity2 -
-Modules/expat/xmlrole.c - entity3 -
-Modules/expat/xmlrole.c - entity4 -
-Modules/expat/xmlrole.c - entity5 -
-Modules/expat/xmlrole.c - entity6 -
-Modules/expat/xmlrole.c - entity7 -
-Modules/expat/xmlrole.c - entity8 -
-Modules/expat/xmlrole.c - entity9 -
-Modules/expat/xmlrole.c - entity10 -
-Modules/expat/xmlrole.c - notation0 -
-Modules/expat/xmlrole.c - notation1 -
-Modules/expat/xmlrole.c - notation2 -
-Modules/expat/xmlrole.c - notation3 -
-Modules/expat/xmlrole.c - notation4 -
-Modules/expat/xmlrole.c - attlist0 -
-Modules/expat/xmlrole.c - attlist1 -
-Modules/expat/xmlrole.c - attlist2 -
-Modules/expat/xmlrole.c - attlist3 -
-Modules/expat/xmlrole.c - attlist4 -
-Modules/expat/xmlrole.c - attlist5 -
-Modules/expat/xmlrole.c - attlist6 -
-Modules/expat/xmlrole.c - attlist7 -
-Modules/expat/xmlrole.c - attlist8 -
-Modules/expat/xmlrole.c - attlist9 -
-Modules/expat/xmlrole.c - element0 -
-Modules/expat/xmlrole.c - element1 -
-Modules/expat/xmlrole.c - element2 -
-Modules/expat/xmlrole.c - element3 -
-Modules/expat/xmlrole.c - element4 -
-Modules/expat/xmlrole.c - element5 -
-Modules/expat/xmlrole.c - element6 -
-Modules/expat/xmlrole.c - element7 -
-Modules/expat/xmlrole.c - externalSubset0 -
-Modules/expat/xmlrole.c - externalSubset1 -
-Modules/expat/xmlrole.c - condSect0 -
-Modules/expat/xmlrole.c - condSect1 -
-Modules/expat/xmlrole.c - condSect2 -
-Modules/expat/xmlrole.c - declClose -
-Modules/expat/xmlrole.c - error -
+# These are all variables that we will be leaving global.
+# All module defs, type defs, etc. are handled in c-analyzer/cpython/_analyzer.py.
+# All kwlist arrays are handled in c-analyzer/cpython/_analyzer.py.
-##################################
-# test code
-
-Modules/_ctypes/_ctypes_test.c - _ctypes_test_slots -
-Modules/_ctypes/_ctypes_test.c - _ctypes_testmodule -
-Modules/_ctypes/_ctypes_test.c - _xxx_lib -
-Modules/_ctypes/_ctypes_test.c - an_integer -
-Modules/_ctypes/_ctypes_test.c - bottom -
-Modules/_ctypes/_ctypes_test.c - last_tf_arg_s -
-Modules/_ctypes/_ctypes_test.c - last_tf_arg_u -
-Modules/_ctypes/_ctypes_test.c - last_tfrsuv_arg -
-Modules/_ctypes/_ctypes_test.c - left -
-Modules/_ctypes/_ctypes_test.c - module_methods -
-Modules/_ctypes/_ctypes_test.c - my_eggs -
-Modules/_ctypes/_ctypes_test.c - my_spams -
-Modules/_ctypes/_ctypes_test.c - right -
-Modules/_ctypes/_ctypes_test.c - top -
-Modules/_testbuffer.c - NDArray_Type -
-Modules/_testbuffer.c - StaticArray_Type -
-Modules/_testbuffer.c - Struct -
-Modules/_testbuffer.c - _testbuffer_functions -
-Modules/_testbuffer.c - _testbuffermodule -
-Modules/_testbuffer.c - calcsize -
-Modules/_testbuffer.c - infobuf -
-Modules/_testbuffer.c - ndarray_as_buffer -
-Modules/_testbuffer.c - ndarray_as_mapping -
-Modules/_testbuffer.c - ndarray_as_sequence -
-Modules/_testbuffer.c - ndarray_getset -
-Modules/_testbuffer.c - ndarray_methods -
-Modules/_testbuffer.c - simple_fmt -
-Modules/_testbuffer.c - simple_format -
-Modules/_testbuffer.c - static_buffer -
-Modules/_testbuffer.c - static_mem -
-Modules/_testbuffer.c - static_shape -
-Modules/_testbuffer.c - static_strides -
-Modules/_testbuffer.c - staticarray_as_buffer -
-Modules/_testbuffer.c - structmodule -
-Modules/_testbuffer.c ndarray_init kwlist -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer format -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer info -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer shape -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer strides -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer suboffsets -
-Modules/_testbuffer.c ndarray_push kwlist -
-Modules/_testbuffer.c staticarray_init kwlist -
-Modules/_testcapi/heaptype.c - _testcapimodule -
-Modules/_testcapi/unicode.c - _testcapimodule -
-Modules/_testcapimodule.c - ContainerNoGC_members -
-Modules/_testcapimodule.c - ContainerNoGC_type -
-Modules/_testcapimodule.c - FmData -
-Modules/_testcapimodule.c - FmHook -
-Modules/_testcapimodule.c - GenericAlias_Type -
-Modules/_testcapimodule.c - Generic_Type -
-Modules/_testcapimodule.c - HeapCTypeSetattr_slots -
-Modules/_testcapimodule.c - HeapCTypeSetattr_spec -
-Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_slots -
-Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_spec -
-Modules/_testcapimodule.c - HeapCTypeSubclass_slots -
-Modules/_testcapimodule.c - HeapCTypeSubclass_spec -
-Modules/_testcapimodule.c - HeapCTypeWithBuffer_slots -
-Modules/_testcapimodule.c - HeapCTypeWithBuffer_spec -
-Modules/_testcapimodule.c - HeapCTypeWithDict_slots -
-Modules/_testcapimodule.c - HeapCTypeWithDict_spec -
-Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_slots -
-Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_spec -
-Modules/_testcapimodule.c - HeapCTypeWithWeakref_slots -
-Modules/_testcapimodule.c - HeapCTypeWithWeakref_spec -
-Modules/_testcapimodule.c - HeapCType_slots -
-Modules/_testcapimodule.c - HeapCType_spec -
-Modules/_testcapimodule.c - HeapDocCType_slots -
-Modules/_testcapimodule.c - HeapDocCType_spec -
-Modules/_testcapimodule.c - HeapGcCType_slots -
-Modules/_testcapimodule.c - HeapGcCType_spec -
-Modules/_testcapimodule.c - MethClass_Type -
-Modules/_testcapimodule.c - MethInstance_Type -
-Modules/_testcapimodule.c - MethStatic_Type -
-Modules/_testcapimodule.c - MethodDescriptor2_Type -
-Modules/_testcapimodule.c - MethodDescriptorBase_Type -
-Modules/_testcapimodule.c - MethodDescriptorDerived_Type -
-Modules/_testcapimodule.c - MethodDescriptorNopGet_Type -
-Modules/_testcapimodule.c - MyList_Type -
-Modules/_testcapimodule.c - PyRecursingInfinitelyError_Type -
-Modules/_testcapimodule.c - TestError -
-Modules/_testcapimodule.c - TestMethods -
-Modules/_testcapimodule.c - _HashInheritanceTester_Type -
-Modules/_testcapimodule.c - _testcapimodule -
-Modules/_testcapimodule.c - awaitType -
-Modules/_testcapimodule.c - awaitType_as_async -
-Modules/_testcapimodule.c - capsule_context -
-Modules/_testcapimodule.c - capsule_destructor_call_count -
-Modules/_testcapimodule.c - capsule_error -
-Modules/_testcapimodule.c - capsule_name -
-Modules/_testcapimodule.c - capsule_pointer -
-Modules/_testcapimodule.c - decimal_initialized -
-Modules/_testcapimodule.c - generic_alias_methods -
-Modules/_testcapimodule.c - generic_methods -
-Modules/_testcapimodule.c - heapctype_members -
-Modules/_testcapimodule.c - heapctypesetattr_members -
-Modules/_testcapimodule.c - heapctypesubclass_members -
-Modules/_testcapimodule.c - heapctypewithdict_getsetlist -
-Modules/_testcapimodule.c - heapctypewithdict_members -
-Modules/_testcapimodule.c - heapctypewithnegativedict_members -
-Modules/_testcapimodule.c - heapctypewithweakref_members -
-Modules/_testcapimodule.c - ipowType -
-Modules/_testcapimodule.c - ipowType_as_number -
-Modules/_testcapimodule.c - matmulType -
-Modules/_testcapimodule.c - matmulType_as_number -
-Modules/_testcapimodule.c - meth_class_methods -
-Modules/_testcapimodule.c - meth_instance_methods -
-Modules/_testcapimodule.c - meth_static_methods -
-Modules/_testcapimodule.c - ml -
-Modules/_testcapimodule.c - str1 -
-Modules/_testcapimodule.c - str2 -
-Modules/_testcapimodule.c - test_members -
-Modules/_testcapimodule.c - test_run_counter -
-Modules/_testcapimodule.c - test_structmembersType -
-Modules/_testcapimodule.c - thread_done -
-Modules/_testcapimodule.c - x -
-Modules/_testcapimodule.c getargs_keyword_only keywords -
-Modules/_testcapimodule.c getargs_keywords keywords -
-Modules/_testcapimodule.c getargs_positional_only_and_keywords keywords -
-Modules/_testcapimodule.c getargs_s_hash_int2 keywords static char*[]
-Modules/_testcapimodule.c make_exception_with_doc kwlist -
-Modules/_testcapimodule.c raise_SIGINT_then_send_None PyId_send -
-Modules/_testcapimodule.c slot_tp_del PyId___tp_del__ -
-Modules/_testcapimodule.c test_capsule buffer -
-Modules/_testcapimodule.c test_empty_argparse kwlist -
-Modules/_testcapimodule.c test_structmembers_new keywords -
-Modules/_testcapimodule.c getargs_s_hash_int keywords -
-Modules/_testcapimodule.c - g_dict_watch_events -
-Modules/_testcapimodule.c - g_dict_watchers_installed -
-Modules/_testcapimodule.c - g_type_modified_events -
-Modules/_testcapimodule.c - g_type_watchers_installed -
-Modules/_testimportmultiple.c - _barmodule -
-Modules/_testimportmultiple.c - _foomodule -
-Modules/_testimportmultiple.c - _testimportmultiple -
-Modules/_testinternalcapi.c - TestMethods -
-Modules/_testinternalcapi.c - _testcapimodule -
-Modules/_testmultiphase.c - Example_Type_slots -
-Modules/_testmultiphase.c - Example_Type_spec -
-Modules/_testmultiphase.c - Example_methods -
-Modules/_testmultiphase.c - StateAccessType_Type_slots -
-Modules/_testmultiphase.c - StateAccessType_methods -
-Modules/_testmultiphase.c - StateAccessType_spec -
-Modules/_testmultiphase.c - Str_Type_slots -
-Modules/_testmultiphase.c - Str_Type_spec -
-Modules/_testmultiphase.c - def_bad_large -
-Modules/_testmultiphase.c - def_bad_negative -
-Modules/_testmultiphase.c - def_create_int_with_state -
-Modules/_testmultiphase.c - def_create_null -
-Modules/_testmultiphase.c - def_create_raise -
-Modules/_testmultiphase.c - def_create_unreported_exception -
-Modules/_testmultiphase.c - def_exec_err -
-Modules/_testmultiphase.c - def_exec_raise -
-Modules/_testmultiphase.c - def_exec_unreported_exception -
-Modules/_testmultiphase.c - def_meth_state_access -
-Modules/_testmultiphase.c - def_negative_size -
-Modules/_testmultiphase.c - def_nonascii_kana -
-Modules/_testmultiphase.c - def_nonascii_latin -
-Modules/_testmultiphase.c - def_nonmodule -
-Modules/_testmultiphase.c - def_nonmodule_with_exec_slots -
-Modules/_testmultiphase.c - def_nonmodule_with_methods -
-Modules/_testmultiphase.c - imp_dummy_def -
-Modules/_testmultiphase.c - main_def -
-Modules/_testmultiphase.c - main_slots -
-Modules/_testmultiphase.c - meth_state_access_slots -
-Modules/_testmultiphase.c - nonmodule_methods -
-Modules/_testmultiphase.c - null_slots_def -
-Modules/_testmultiphase.c - slots_bad_large -
-Modules/_testmultiphase.c - slots_bad_negative -
-Modules/_testmultiphase.c - slots_create_nonmodule -
-Modules/_testmultiphase.c - slots_create_nonmodule -
-Modules/_testmultiphase.c - slots_create_null -
-Modules/_testmultiphase.c - slots_create_raise -
-Modules/_testmultiphase.c - slots_create_unreported_exception -
-Modules/_testmultiphase.c - slots_exec_err -
-Modules/_testmultiphase.c - slots_exec_raise -
-Modules/_testmultiphase.c - slots_exec_unreported_exception -
-Modules/_testmultiphase.c - slots_nonmodule_with_exec_slots -
-Modules/_testmultiphase.c - testexport_methods -
-Modules/_testmultiphase.c - uninitialized_def -
-Modules/_xxtestfuzz/_xxtestfuzz.c - _fuzzmodule -
-Modules/_xxtestfuzz/_xxtestfuzz.c - module_methods -
-Modules/_xxtestfuzz/fuzzer.c - SRE_FLAG_DEBUG -
-Modules/_xxtestfuzz/fuzzer.c - ast_literal_eval_method -
-Modules/_xxtestfuzz/fuzzer.c - compiled_patterns -
-Modules/_xxtestfuzz/fuzzer.c - csv_error -
-Modules/_xxtestfuzz/fuzzer.c - csv_module -
-Modules/_xxtestfuzz/fuzzer.c - json_loads_method -
-Modules/_xxtestfuzz/fuzzer.c - regex_patterns -
-Modules/_xxtestfuzz/fuzzer.c - sre_compile_method -
-Modules/_xxtestfuzz/fuzzer.c - sre_error_exception -
-Modules/_xxtestfuzz/fuzzer.c - struct_error -
-Modules/_xxtestfuzz/fuzzer.c - struct_unpack_method -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput CSV_READER_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput JSON_LOADS_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_COMPILE_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_MATCH_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput STRUCT_UNPACK_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput AST_LITERAL_EVAL_INITIALIZED -
-
-
-##################################
-# should be const
-# XXX Make them const.
-
-# These are all variables that we will be leaving global.
-
-# All module defs, type defs, etc. are handled in c-analyzr/cpython/_analyzer.py.
-# All kwlist arrays are handled in c-analyzr/cpython/_analyzer.py.
-
-#-----------------------
-# other vars that are actually constant
+# other vars that are actually constant
Include/internal/pycore_blocks_output_buffer.h - BUFFER_BLOCK_SIZE -
Modules/_csv.c - quote_styles -
@@ -646,3 +350,327 @@ Python/stdlib_module_names.h - _Py_stdlib_module_names -
Python/sysmodule.c - _PySys_ImplCacheTag -
Python/sysmodule.c - _PySys_ImplName -
Python/sysmodule.c - whatstrings -
+
+##-----------------------
+## test code
+
+Modules/_ctypes/_ctypes_test.c - _ctypes_test_slots -
+Modules/_ctypes/_ctypes_test.c - _ctypes_testmodule -
+Modules/_ctypes/_ctypes_test.c - _xxx_lib -
+Modules/_ctypes/_ctypes_test.c - an_integer -
+Modules/_ctypes/_ctypes_test.c - bottom -
+Modules/_ctypes/_ctypes_test.c - last_tf_arg_s -
+Modules/_ctypes/_ctypes_test.c - last_tf_arg_u -
+Modules/_ctypes/_ctypes_test.c - last_tfrsuv_arg -
+Modules/_ctypes/_ctypes_test.c - left -
+Modules/_ctypes/_ctypes_test.c - module_methods -
+Modules/_ctypes/_ctypes_test.c - my_eggs -
+Modules/_ctypes/_ctypes_test.c - my_spams -
+Modules/_ctypes/_ctypes_test.c - right -
+Modules/_ctypes/_ctypes_test.c - top -
+Modules/_testbuffer.c - NDArray_Type -
+Modules/_testbuffer.c - StaticArray_Type -
+Modules/_testbuffer.c - Struct -
+Modules/_testbuffer.c - _testbuffer_functions -
+Modules/_testbuffer.c - _testbuffermodule -
+Modules/_testbuffer.c - calcsize -
+Modules/_testbuffer.c - infobuf -
+Modules/_testbuffer.c - ndarray_as_buffer -
+Modules/_testbuffer.c - ndarray_as_mapping -
+Modules/_testbuffer.c - ndarray_as_sequence -
+Modules/_testbuffer.c - ndarray_getset -
+Modules/_testbuffer.c - ndarray_methods -
+Modules/_testbuffer.c - simple_fmt -
+Modules/_testbuffer.c - simple_format -
+Modules/_testbuffer.c - static_buffer -
+Modules/_testbuffer.c - static_mem -
+Modules/_testbuffer.c - static_shape -
+Modules/_testbuffer.c - static_strides -
+Modules/_testbuffer.c - staticarray_as_buffer -
+Modules/_testbuffer.c - structmodule -
+Modules/_testbuffer.c ndarray_init kwlist -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer format -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer info -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer shape -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer strides -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer suboffsets -
+Modules/_testbuffer.c ndarray_push kwlist -
+Modules/_testbuffer.c staticarray_init kwlist -
+Modules/_testcapi/heaptype.c - _testcapimodule -
+Modules/_testcapi/unicode.c - _testcapimodule -
+Modules/_testcapimodule.c - ContainerNoGC_members -
+Modules/_testcapimodule.c - ContainerNoGC_type -
+Modules/_testcapimodule.c - FmData -
+Modules/_testcapimodule.c - FmHook -
+Modules/_testcapimodule.c - GenericAlias_Type -
+Modules/_testcapimodule.c - Generic_Type -
+Modules/_testcapimodule.c - HeapCTypeSetattr_slots -
+Modules/_testcapimodule.c - HeapCTypeSetattr_spec -
+Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_slots -
+Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_spec -
+Modules/_testcapimodule.c - HeapCTypeSubclass_slots -
+Modules/_testcapimodule.c - HeapCTypeSubclass_spec -
+Modules/_testcapimodule.c - HeapCTypeWithBuffer_slots -
+Modules/_testcapimodule.c - HeapCTypeWithBuffer_spec -
+Modules/_testcapimodule.c - HeapCTypeWithDict_slots -
+Modules/_testcapimodule.c - HeapCTypeWithDict_spec -
+Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_slots -
+Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_spec -
+Modules/_testcapimodule.c - HeapCTypeWithWeakref_slots -
+Modules/_testcapimodule.c - HeapCTypeWithWeakref_spec -
+Modules/_testcapimodule.c - HeapCType_slots -
+Modules/_testcapimodule.c - HeapCType_spec -
+Modules/_testcapimodule.c - HeapDocCType_slots -
+Modules/_testcapimodule.c - HeapDocCType_spec -
+Modules/_testcapimodule.c - HeapGcCType_slots -
+Modules/_testcapimodule.c - HeapGcCType_spec -
+Modules/_testcapimodule.c - MethClass_Type -
+Modules/_testcapimodule.c - MethInstance_Type -
+Modules/_testcapimodule.c - MethStatic_Type -
+Modules/_testcapimodule.c - MethodDescriptor2_Type -
+Modules/_testcapimodule.c - MethodDescriptorBase_Type -
+Modules/_testcapimodule.c - MethodDescriptorDerived_Type -
+Modules/_testcapimodule.c - MethodDescriptorNopGet_Type -
+Modules/_testcapimodule.c - MyList_Type -
+Modules/_testcapimodule.c - PyRecursingInfinitelyError_Type -
+Modules/_testcapimodule.c - TestError -
+Modules/_testcapimodule.c - TestMethods -
+Modules/_testcapimodule.c - _HashInheritanceTester_Type -
+Modules/_testcapimodule.c - _testcapimodule -
+Modules/_testcapimodule.c - awaitType -
+Modules/_testcapimodule.c - awaitType_as_async -
+Modules/_testcapimodule.c - capsule_context -
+Modules/_testcapimodule.c - capsule_destructor_call_count -
+Modules/_testcapimodule.c - capsule_error -
+Modules/_testcapimodule.c - capsule_name -
+Modules/_testcapimodule.c - capsule_pointer -
+Modules/_testcapimodule.c - decimal_initialized -
+Modules/_testcapimodule.c - generic_alias_methods -
+Modules/_testcapimodule.c - generic_methods -
+Modules/_testcapimodule.c - heapctype_members -
+Modules/_testcapimodule.c - heapctypesetattr_members -
+Modules/_testcapimodule.c - heapctypesubclass_members -
+Modules/_testcapimodule.c - heapctypewithdict_getsetlist -
+Modules/_testcapimodule.c - heapctypewithdict_members -
+Modules/_testcapimodule.c - heapctypewithnegativedict_members -
+Modules/_testcapimodule.c - heapctypewithweakref_members -
+Modules/_testcapimodule.c - ipowType -
+Modules/_testcapimodule.c - ipowType_as_number -
+Modules/_testcapimodule.c - matmulType -
+Modules/_testcapimodule.c - matmulType_as_number -
+Modules/_testcapimodule.c - meth_class_methods -
+Modules/_testcapimodule.c - meth_instance_methods -
+Modules/_testcapimodule.c - meth_static_methods -
+Modules/_testcapimodule.c - ml -
+Modules/_testcapimodule.c - str1 -
+Modules/_testcapimodule.c - str2 -
+Modules/_testcapimodule.c - test_members -
+Modules/_testcapimodule.c - test_run_counter -
+Modules/_testcapimodule.c - test_structmembersType -
+Modules/_testcapimodule.c - thread_done -
+Modules/_testcapimodule.c - x -
+Modules/_testcapimodule.c getargs_keyword_only keywords -
+Modules/_testcapimodule.c getargs_keywords keywords -
+Modules/_testcapimodule.c getargs_positional_only_and_keywords keywords -
+Modules/_testcapimodule.c getargs_s_hash_int2 keywords static char*[]
+Modules/_testcapimodule.c make_exception_with_doc kwlist -
+Modules/_testcapimodule.c raise_SIGINT_then_send_None PyId_send -
+Modules/_testcapimodule.c slot_tp_del PyId___tp_del__ -
+Modules/_testcapimodule.c test_capsule buffer -
+Modules/_testcapimodule.c test_empty_argparse kwlist -
+Modules/_testcapimodule.c test_structmembers_new keywords -
+Modules/_testcapimodule.c getargs_s_hash_int keywords -
+Modules/_testcapimodule.c - g_dict_watch_events -
+Modules/_testcapimodule.c - g_dict_watchers_installed -
+Modules/_testcapimodule.c - g_type_modified_events -
+Modules/_testcapimodule.c - g_type_watchers_installed -
+Modules/_testimportmultiple.c - _barmodule -
+Modules/_testimportmultiple.c - _foomodule -
+Modules/_testimportmultiple.c - _testimportmultiple -
+Modules/_testinternalcapi.c - TestMethods -
+Modules/_testinternalcapi.c - _testcapimodule -
+Modules/_testmultiphase.c - Example_Type_slots -
+Modules/_testmultiphase.c - Example_Type_spec -
+Modules/_testmultiphase.c - Example_methods -
+Modules/_testmultiphase.c - StateAccessType_Type_slots -
+Modules/_testmultiphase.c - StateAccessType_methods -
+Modules/_testmultiphase.c - StateAccessType_spec -
+Modules/_testmultiphase.c - Str_Type_slots -
+Modules/_testmultiphase.c - Str_Type_spec -
+Modules/_testmultiphase.c - def_bad_large -
+Modules/_testmultiphase.c - def_bad_negative -
+Modules/_testmultiphase.c - def_create_int_with_state -
+Modules/_testmultiphase.c - def_create_null -
+Modules/_testmultiphase.c - def_create_raise -
+Modules/_testmultiphase.c - def_create_unreported_exception -
+Modules/_testmultiphase.c - def_exec_err -
+Modules/_testmultiphase.c - def_exec_raise -
+Modules/_testmultiphase.c - def_exec_unreported_exception -
+Modules/_testmultiphase.c - def_meth_state_access -
+Modules/_testmultiphase.c - def_negative_size -
+Modules/_testmultiphase.c - def_nonascii_kana -
+Modules/_testmultiphase.c - def_nonascii_latin -
+Modules/_testmultiphase.c - def_nonmodule -
+Modules/_testmultiphase.c - def_nonmodule_with_exec_slots -
+Modules/_testmultiphase.c - def_nonmodule_with_methods -
+Modules/_testmultiphase.c - imp_dummy_def -
+Modules/_testmultiphase.c - main_def -
+Modules/_testmultiphase.c - main_slots -
+Modules/_testmultiphase.c - meth_state_access_slots -
+Modules/_testmultiphase.c - nonmodule_methods -
+Modules/_testmultiphase.c - null_slots_def -
+Modules/_testmultiphase.c - slots_bad_large -
+Modules/_testmultiphase.c - slots_bad_negative -
+Modules/_testmultiphase.c - slots_create_nonmodule -
+Modules/_testmultiphase.c - slots_create_nonmodule -
+Modules/_testmultiphase.c - slots_create_null -
+Modules/_testmultiphase.c - slots_create_raise -
+Modules/_testmultiphase.c - slots_create_unreported_exception -
+Modules/_testmultiphase.c - slots_exec_err -
+Modules/_testmultiphase.c - slots_exec_raise -
+Modules/_testmultiphase.c - slots_exec_unreported_exception -
+Modules/_testmultiphase.c - slots_nonmodule_with_exec_slots -
+Modules/_testmultiphase.c - testexport_methods -
+Modules/_testmultiphase.c - uninitialized_def -
+Modules/_xxtestfuzz/_xxtestfuzz.c - _fuzzmodule -
+Modules/_xxtestfuzz/_xxtestfuzz.c - module_methods -
+Modules/_xxtestfuzz/fuzzer.c - SRE_FLAG_DEBUG -
+Modules/_xxtestfuzz/fuzzer.c - ast_literal_eval_method -
+Modules/_xxtestfuzz/fuzzer.c - compiled_patterns -
+Modules/_xxtestfuzz/fuzzer.c - csv_error -
+Modules/_xxtestfuzz/fuzzer.c - csv_module -
+Modules/_xxtestfuzz/fuzzer.c - json_loads_method -
+Modules/_xxtestfuzz/fuzzer.c - regex_patterns -
+Modules/_xxtestfuzz/fuzzer.c - sre_compile_method -
+Modules/_xxtestfuzz/fuzzer.c - sre_error_exception -
+Modules/_xxtestfuzz/fuzzer.c - struct_error -
+Modules/_xxtestfuzz/fuzzer.c - struct_unpack_method -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput CSV_READER_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput JSON_LOADS_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_COMPILE_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_MATCH_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput STRUCT_UNPACK_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput AST_LITERAL_EVAL_INITIALIZED -
+
+##-----------------------
+## the analyzer should have ignored these
+# XXX Fix the analyzer.
+
+## forward/extern references
+Include/py_curses.h - PyCurses_API -
+Include/pydecimal.h - _decimal_api -
+Modules/_blake2/blake2module.c - blake2b_type_spec -
+Modules/_blake2/blake2module.c - blake2s_type_spec -
+Modules/_io/fileio.c - _Py_open_cloexec_works -
+Modules/_io/_iomodule.h - PyIOBase_Type -
+Modules/_io/_iomodule.h - PyRawIOBase_Type -
+Modules/_io/_iomodule.h - PyBufferedIOBase_Type -
+Modules/_io/_iomodule.h - PyTextIOBase_Type -
+Modules/_io/_iomodule.h - PyFileIO_Type -
+Modules/_io/_iomodule.h - PyBytesIO_Type -
+Modules/_io/_iomodule.h - PyStringIO_Type -
+Modules/_io/_iomodule.h - PyBufferedReader_Type -
+Modules/_io/_iomodule.h - PyBufferedWriter_Type -
+Modules/_io/_iomodule.h - PyBufferedRWPair_Type -
+Modules/_io/_iomodule.h - PyBufferedRandom_Type -
+Modules/_io/_iomodule.h - PyTextIOWrapper_Type -
+Modules/_io/_iomodule.h - PyIncrementalNewlineDecoder_Type -
+Modules/_io/_iomodule.h - _PyBytesIOBuffer_Type -
+Modules/_io/_iomodule.h - _PyIO_Module -
+Modules/_io/_iomodule.h - _PyIO_str_close -
+Modules/_io/_iomodule.h - _PyIO_str_closed -
+Modules/_io/_iomodule.h - _PyIO_str_decode -
+Modules/_io/_iomodule.h - _PyIO_str_encode -
+Modules/_io/_iomodule.h - _PyIO_str_fileno -
+Modules/_io/_iomodule.h - _PyIO_str_flush -
+Modules/_io/_iomodule.h - _PyIO_str_getstate -
+Modules/_io/_iomodule.h - _PyIO_str_isatty -
+Modules/_io/_iomodule.h - _PyIO_str_newlines -
+Modules/_io/_iomodule.h - _PyIO_str_nl -
+Modules/_io/_iomodule.h - _PyIO_str_peek -
+Modules/_io/_iomodule.h - _PyIO_str_read -
+Modules/_io/_iomodule.h - _PyIO_str_read1 -
+Modules/_io/_iomodule.h - _PyIO_str_readable -
+Modules/_io/_iomodule.h - _PyIO_str_readall -
+Modules/_io/_iomodule.h - _PyIO_str_readinto -
+Modules/_io/_iomodule.h - _PyIO_str_readline -
+Modules/_io/_iomodule.h - _PyIO_str_reset -
+Modules/_io/_iomodule.h - _PyIO_str_seek -
+Modules/_io/_iomodule.h - _PyIO_str_seekable -
+Modules/_io/_iomodule.h - _PyIO_str_setstate -
+Modules/_io/_iomodule.h - _PyIO_str_tell -
+Modules/_io/_iomodule.h - _PyIO_str_truncate -
+Modules/_io/_iomodule.h - _PyIO_str_writable -
+Modules/_io/_iomodule.h - _PyIO_str_write -
+Modules/_io/_iomodule.h - _PyIO_empty_str -
+Modules/_io/_iomodule.h - _PyIO_empty_bytes -
+Modules/_multiprocessing/multiprocessing.h - _PyMp_SemLockType -
+Modules/_sqlite/module.c - _pysqlite_converters -
+Modules/_sqlite/module.c - _pysqlite_enable_callback_tracebacks -
+Modules/_sqlite/module.c - pysqlite_BaseTypeAdapted -
+Modules/_sqlite/module.h - pysqlite_global_state -
+Modules/_testcapimodule.c - _PyBytesIOBuffer_Type -
+Modules/posixmodule.c - _Py_open_cloexec_works -
+Modules/posixmodule.c - environ -
+Objects/object.c - _Py_GenericAliasIterType -
+Objects/object.c - _PyMemoryIter_Type -
+Objects/object.c - _PyLineIterator -
+Objects/object.c - _PyPositionsIterator -
+Python/perf_trampoline.c - _Py_trampoline_func_start -
+Python/perf_trampoline.c - _Py_trampoline_func_end -
+Python/importdl.h - _PyImport_DynLoadFiletab -
+Modules/expat/xmlrole.c - prolog0 -
+Modules/expat/xmlrole.c - prolog1 -
+Modules/expat/xmlrole.c - prolog2 -
+Modules/expat/xmlrole.c - doctype0 -
+Modules/expat/xmlrole.c - doctype1 -
+Modules/expat/xmlrole.c - doctype2 -
+Modules/expat/xmlrole.c - doctype3 -
+Modules/expat/xmlrole.c - doctype4 -
+Modules/expat/xmlrole.c - doctype5 -
+Modules/expat/xmlrole.c - internalSubset -
+Modules/expat/xmlrole.c - entity0 -
+Modules/expat/xmlrole.c - entity1 -
+Modules/expat/xmlrole.c - entity2 -
+Modules/expat/xmlrole.c - entity3 -
+Modules/expat/xmlrole.c - entity4 -
+Modules/expat/xmlrole.c - entity5 -
+Modules/expat/xmlrole.c - entity6 -
+Modules/expat/xmlrole.c - entity7 -
+Modules/expat/xmlrole.c - entity8 -
+Modules/expat/xmlrole.c - entity9 -
+Modules/expat/xmlrole.c - entity10 -
+Modules/expat/xmlrole.c - notation0 -
+Modules/expat/xmlrole.c - notation1 -
+Modules/expat/xmlrole.c - notation2 -
+Modules/expat/xmlrole.c - notation3 -
+Modules/expat/xmlrole.c - notation4 -
+Modules/expat/xmlrole.c - attlist0 -
+Modules/expat/xmlrole.c - attlist1 -
+Modules/expat/xmlrole.c - attlist2 -
+Modules/expat/xmlrole.c - attlist3 -
+Modules/expat/xmlrole.c - attlist4 -
+Modules/expat/xmlrole.c - attlist5 -
+Modules/expat/xmlrole.c - attlist6 -
+Modules/expat/xmlrole.c - attlist7 -
+Modules/expat/xmlrole.c - attlist8 -
+Modules/expat/xmlrole.c - attlist9 -
+Modules/expat/xmlrole.c - element0 -
+Modules/expat/xmlrole.c - element1 -
+Modules/expat/xmlrole.c - element2 -
+Modules/expat/xmlrole.c - element3 -
+Modules/expat/xmlrole.c - element4 -
+Modules/expat/xmlrole.c - element5 -
+Modules/expat/xmlrole.c - element6 -
+Modules/expat/xmlrole.c - element7 -
+Modules/expat/xmlrole.c - externalSubset0 -
+Modules/expat/xmlrole.c - externalSubset1 -
+Modules/expat/xmlrole.c - condSect0 -
+Modules/expat/xmlrole.c - condSect1 -
+Modules/expat/xmlrole.c - condSect2 -
+Modules/expat/xmlrole.c - declClose -
+Modules/expat/xmlrole.c - error -
+
+## other
+Modules/_io/_iomodule.c - _PyIO_Module -
+Modules/_sqlite/module.c - _sqlite3module -
diff --git a/Tools/c-analyzer/table-file.py b/Tools/c-analyzer/table-file.py
index 3cc05cc9de7779..d36f814415c8e7 100644
--- a/Tools/c-analyzer/table-file.py
+++ b/Tools/c-analyzer/table-file.py
@@ -1,43 +1,59 @@
+KINDS = [
+ 'section-major',
+ 'section-minor',
+ 'section-group',
+ 'row',
+]
+
+
def iter_clean_lines(lines):
lines = iter(lines)
- for line in lines:
- line = line.strip()
- if line.startswith('# XXX'):
+ for rawline in lines:
+ line = rawline.strip()
+ if line.startswith('#') and not rawline.startswith('##'):
continue
- yield line
+ yield line, rawline
def parse_table_lines(lines):
lines = iter_clean_lines(lines)
- for line in lines:
- if line.startswith(('####', '#----')):
- kind = 0 if line[1] == '#' else 1
- try:
- line = next(lines).strip()
- except StopIteration:
- line = ''
- if not line.startswith('# '):
- raise NotImplementedError(line)
- yield kind, line[2:].lstrip()
- continue
-
- maybe = None
- while line.startswith('#'):
- if line != '#' and line[1] == ' ':
- maybe = line[2:].lstrip()
- try:
- line = next(lines).strip()
- except StopIteration:
- return
- if not line:
- break
- else:
- if line:
- if maybe:
- yield 2, maybe
- yield 'row', line
+ group = None
+ prev = ''
+ for line, rawline in lines:
+ if line.startswith('## '):
+ assert not rawline.startswith(' '), (line, rawline)
+ if group:
+ assert prev, (line, rawline)
+ kind, after, _ = group
+ assert kind and kind != 'section-group', (group, line, rawline)
+ assert after is not None, (group, line, rawline)
+ else:
+ assert not prev, (prev, line, rawline)
+ kind, after = group = ('section-group', None)
+ title = line[3:].lstrip()
+ assert title, (line, rawline)
+ if after is not None:
+ try:
+ line, rawline = next(lines)
+ except StopIteration:
+ line = None
+ if line != after:
+ raise NotImplementedError((group, line, rawline))
+ yield kind, title
+ group = None
+ elif group:
+ raise NotImplementedError((group, line, rawline))
+ elif line.startswith('##---'):
+ assert line.rstrip('-') == '##', (line, rawline)
+ group = ('section-minor', '', line)
+ elif line.startswith('#####'):
+ assert not line.strip('#'), (line, rawline)
+ group = ('section-major', '', line)
+ elif line:
+ yield 'row', line
+ prev = line
def iter_sections(lines):
@@ -49,12 +65,13 @@ def iter_sections(lines):
if header is None:
header = value
continue
- raise NotImplementedError(value)
+ raise NotImplementedError(repr(value))
yield tuple(section), value
else:
if header is None:
header = False
- section[kind:] = [value]
+ start = KINDS.index(kind)
+ section[start:] = [value]
def collect_sections(lines):
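
A note on the table-file.py rewrite just above: the new KINDS list ranks the three header levels plus rows, and iter_sections() truncates the running section path at the rank of each header it sees. A minimal, self-contained sketch of that bookkeeping (the track_sections() helper and the sample titles are illustrative, not part of the real script):

KINDS = ['section-major', 'section-minor', 'section-group', 'row']

def track_sections(events):
    # events: (kind, value) pairs as produced by parse_table_lines()
    section = []
    for kind, value in events:
        if kind == 'row':
            yield tuple(section), value
        else:
            start = KINDS.index(kind)     # nesting rank of this header
            section[start:] = [value]     # drop deeper levels, set this one

# A minor header replaces the previous minor title without touching the
# major title above it, so rows always carry the full (major, minor) path.
rows = list(track_sections([
    ('section-major', 'globals to fix in Modules/'),
    ('section-minor', 'test code'),
    ('row', 'Modules/_testbuffer.c - infobuf -'),
    ('section-minor', 'the analyzer should have ignored these'),
    ('row', 'Modules/_io/_iomodule.c - _PyIO_Module -'),
]))
for path, row in rows:
    print(path, '->', row)
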
diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py
index 424b15ede2aadf..2dfc76f2560eae 100644
--- a/Tools/cases_generator/generate_cases.py
+++ b/Tools/cases_generator/generate_cases.py
@@ -13,144 +13,206 @@
import typing
import parser
-
-DEFAULT_INPUT = "Python/bytecodes.c"
-DEFAULT_OUTPUT = "Python/generated_cases.c.h"
+from parser import StackEffect
+
+DEFAULT_INPUT = os.path.relpath(
+ os.path.join(os.path.dirname(__file__), "../../Python/bytecodes.c")
+)
+DEFAULT_OUTPUT = os.path.relpath(
+ os.path.join(os.path.dirname(__file__), "../../Python/generated_cases.c.h")
+)
BEGIN_MARKER = "// BEGIN BYTECODES //"
END_MARKER = "// END BYTECODES //"
-RE_PREDICTED = r"(?s)(?:PREDICT\(|GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);"
+RE_PREDICTED = r"^\s*(?:PREDICT\(|GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);\s*$"
UNUSED = "unused"
BITS_PER_CODE_UNIT = 16
-arg_parser = argparse.ArgumentParser()
-arg_parser.add_argument("-i", "--input", type=str, default=DEFAULT_INPUT)
-arg_parser.add_argument("-o", "--output", type=str, default=DEFAULT_OUTPUT)
+arg_parser = argparse.ArgumentParser(
+ description="Generate the code for the interpreter switch.",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+)
+arg_parser.add_argument(
+ "-i", "--input", type=str, help="Instruction definitions", default=DEFAULT_INPUT
+)
+arg_parser.add_argument(
+ "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT
+)
+
+
+class Formatter:
+ """Wraps an output stream with the ability to indent etc."""
+
+ stream: typing.TextIO
+ prefix: str
+
+ def __init__(self, stream: typing.TextIO, indent: int) -> None:
+ self.stream = stream
+ self.prefix = " " * indent
+
+ def write_raw(self, s: str) -> None:
+ self.stream.write(s)
+
+ def emit(self, arg: str) -> None:
+ if arg:
+ self.write_raw(f"{self.prefix}{arg}\n")
+ else:
+ self.write_raw("\n")
+
+ @contextlib.contextmanager
+ def indent(self):
+ self.prefix += " "
+ yield
+ self.prefix = self.prefix[:-4]
+
+ @contextlib.contextmanager
+ def block(self, head: str):
+ if head:
+ self.emit(head + " {")
+ else:
+ self.emit("{")
+ with self.indent():
+ yield
+ self.emit("}")
+
+ def stack_adjust(self, diff: int):
+ if diff > 0:
+ self.emit(f"STACK_GROW({diff});")
+ elif diff < 0:
+ self.emit(f"STACK_SHRINK({-diff});")
+
+ def declare(self, dst: StackEffect, src: StackEffect | None):
+ if dst.name == UNUSED:
+ return
+ typ = f"{dst.type} " if dst.type else "PyObject *"
+ init = ""
+ if src:
+ cast = self.cast(dst, src)
+ init = f" = {cast}{src.name}"
+ self.emit(f"{typ}{dst.name}{init};")
+
+ def assign(self, dst: StackEffect, src: StackEffect):
+ if src.name == UNUSED:
+ return
+ cast = self.cast(dst, src)
+ if m := re.match(r"^PEEK\((\d+)\)$", dst.name):
+ self.emit(f"POKE({m.group(1)}, {cast}{src.name});")
+ else:
+ self.emit(f"{dst.name} = {cast}{src.name};")
+
+ def cast(self, dst: StackEffect, src: StackEffect) -> str:
+ return f"({dst.type or 'PyObject *'})" if src.type != dst.type else ""
-# This is not a data class
-class Instruction(parser.InstDef):
+@dataclasses.dataclass
+class Instruction:
"""An instruction with additional data and code."""
+ # Parts of the underlying instruction definition
+ inst: parser.InstDef
+ kind: typing.Literal["inst", "op"]
+ name: str
+ block: parser.Block
+ block_text: list[str] # Block.text, less curlies, less PREDICT() calls
+ predictions: list[str] # Prediction targets (instruction names)
+
# Computed by constructor
always_exits: bool
cache_offset: int
cache_effects: list[parser.CacheEffect]
- input_effects: list[parser.StackEffect]
- output_effects: list[parser.StackEffect]
+ input_effects: list[StackEffect]
+ output_effects: list[StackEffect]
# Set later
family: parser.Family | None = None
predicted: bool = False
def __init__(self, inst: parser.InstDef):
- super().__init__(inst.header, inst.block)
- self.context = inst.context
- self.always_exits = always_exits(self.block)
+ self.inst = inst
+ self.kind = inst.kind
+ self.name = inst.name
+ self.block = inst.block
+ self.block_text, self.predictions = extract_block_text(self.block)
+ self.always_exits = always_exits(self.block_text)
self.cache_effects = [
- effect for effect in self.inputs if isinstance(effect, parser.CacheEffect)
+ effect for effect in inst.inputs if isinstance(effect, parser.CacheEffect)
]
self.cache_offset = sum(c.size for c in self.cache_effects)
self.input_effects = [
- effect for effect in self.inputs if isinstance(effect, parser.StackEffect)
+ effect for effect in inst.inputs if isinstance(effect, StackEffect)
]
- self.output_effects = self.outputs # For consistency/completeness
+ self.output_effects = inst.outputs # For consistency/completeness
- def write(self, f: typing.TextIO, indent: str, dedent: int = 0) -> None:
+ def write(self, out: Formatter) -> None:
"""Write one instruction, sans prologue and epilogue."""
- if dedent < 0:
- indent += " " * -dedent # DO WE NEED THIS?
-
- # Get cache offset and maybe assert that it is correct
+ # Write a static assertion that a family's cache size is correct
if family := self.family:
if self.name == family.members[0]:
if cache_size := family.size:
- f.write(
- f"{indent} static_assert({cache_size} == "
- f'{self.cache_offset}, "incorrect cache size");\n'
+ out.emit(
+ f"static_assert({cache_size} == "
+ f'{self.cache_offset}, "incorrect cache size");'
)
- # Write cache effect variable declarations
- cache_offset = 0
- for ceffect in self.cache_effects:
- if ceffect.name != UNUSED:
- bits = ceffect.size * BITS_PER_CODE_UNIT
- if bits == 64:
- # NOTE: We assume that 64-bit data in the cache
- # is always an object pointer.
- # If this becomes false, we need a way to specify
- # syntactically what type the cache data is.
- f.write(
- f"{indent} PyObject *{ceffect.name} = "
- f"read_obj(next_instr + {cache_offset});\n"
- )
- else:
- f.write(f"{indent} uint{bits}_t {ceffect.name} = "
- f"read_u{bits}(next_instr + {cache_offset});\n")
- cache_offset += ceffect.size
- assert cache_offset == self.cache_offset
-
# Write input stack effect variable declarations and initializations
- for i, seffect in enumerate(reversed(self.input_effects), 1):
- if seffect.name != UNUSED:
- f.write(f"{indent} PyObject *{seffect.name} = PEEK({i});\n")
+ for i, ieffect in enumerate(reversed(self.input_effects), 1):
+ src = StackEffect(f"PEEK({i})", "")
+ out.declare(ieffect, src)
# Write output stack effect variable declarations
- input_names = {seffect.name for seffect in self.input_effects}
- input_names.add(UNUSED)
- for seffect in self.output_effects:
- if seffect.name not in input_names:
- f.write(f"{indent} PyObject *{seffect.name};\n")
+ input_names = {ieffect.name for ieffect in self.input_effects}
+ for oeffect in self.output_effects:
+ if oeffect.name not in input_names:
+ out.declare(oeffect, None)
- self.write_body(f, indent, dedent)
+ self.write_body(out, 0)
# Skip the rest if the block always exits
- if always_exits(self.block):
+ if self.always_exits:
return
# Write net stack growth/shrinkage
diff = len(self.output_effects) - len(self.input_effects)
- if diff > 0:
- f.write(f"{indent} STACK_GROW({diff});\n")
- elif diff < 0:
- f.write(f"{indent} STACK_SHRINK({-diff});\n")
+ out.stack_adjust(diff)
# Write output stack effect assignments
- unmoved_names = {UNUSED}
+ unmoved_names: set[str] = set()
for ieffect, oeffect in zip(self.input_effects, self.output_effects):
if ieffect.name == oeffect.name:
unmoved_names.add(ieffect.name)
- for i, seffect in enumerate(reversed(self.output_effects)):
- if seffect.name not in unmoved_names:
- f.write(f"{indent} POKE({i+1}, {seffect.name});\n")
+ for i, oeffect in enumerate(reversed(self.output_effects), 1):
+ if oeffect.name not in unmoved_names:
+ dst = StackEffect(f"PEEK({i})", "")
+ out.assign(dst, oeffect)
# Write cache effect
if self.cache_offset:
- f.write(f"{indent} next_instr += {self.cache_offset};\n")
+ out.emit(f"JUMPBY({self.cache_offset});")
- def write_body(self, f: typing.TextIO, ndent: str, dedent: int) -> None:
+ def write_body(self, out: Formatter, dedent: int, cache_adjust: int = 0) -> None:
"""Write the instruction body."""
-
- # Get lines of text with proper dedent
- blocklines = self.block.to_text(dedent=dedent).splitlines(True)
-
- # Remove blank lines from both ends
- while blocklines and not blocklines[0].strip():
- blocklines.pop(0)
- while blocklines and not blocklines[-1].strip():
- blocklines.pop()
-
- # Remove leading and trailing braces
- assert blocklines and blocklines[0].strip() == "{"
- assert blocklines and blocklines[-1].strip() == "}"
- blocklines.pop()
- blocklines.pop(0)
-
- # Remove trailing blank lines
- while blocklines and not blocklines[-1].strip():
- blocklines.pop()
+ # Write cache effect variable declarations and initializations
+ cache_offset = cache_adjust
+ for ceffect in self.cache_effects:
+ if ceffect.name != UNUSED:
+ bits = ceffect.size * BITS_PER_CODE_UNIT
+ if bits == 64:
+ # NOTE: We assume that 64-bit data in the cache
+ # is always an object pointer.
+ # If this becomes false, we need a way to specify
+ # syntactically what type the cache data is.
+ typ = "PyObject *"
+ func = "read_obj"
+ else:
+ typ = f"uint{bits}_t "
+ func = f"read_u{bits}"
+ out.emit(f"{typ}{ceffect.name} = {func}(&next_instr[{cache_offset}].cache);")
+ cache_offset += ceffect.size
+ assert cache_offset == self.cache_offset + cache_adjust
# Write the body, substituting a goto for ERROR_IF()
- for line in blocklines:
+ assert dedent <= 0
+ extra = " " * -dedent
+ for line in self.block_text:
if m := re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*$", line):
space, cond, label = m.groups()
# ERROR_IF() must pop the inputs from the stack.
@@ -165,122 +227,104 @@ def write_body(self, f: typing.TextIO, ndent: str, dedent: int) -> None:
else:
break
if ninputs:
- f.write(f"{space}if ({cond}) goto pop_{ninputs}_{label};\n")
+ out.write_raw(
+ f"{extra}{space}if ({cond}) goto pop_{ninputs}_{label};\n"
+ )
else:
- f.write(f"{space}if ({cond}) goto {label};\n")
+ out.write_raw(f"{extra}{space}if ({cond}) goto {label};\n")
else:
- f.write(line)
+ out.write_raw(extra + line)
+
+
+InstructionOrCacheEffect = Instruction | parser.CacheEffect
+StackEffectMapping = list[tuple[StackEffect, StackEffect]]
@dataclasses.dataclass
-class SuperComponent:
+class Component:
instr: Instruction
- input_mapping: dict[str, parser.StackEffect]
- output_mapping: dict[str, parser.StackEffect]
+ input_mapping: StackEffectMapping
+ output_mapping: StackEffectMapping
+
+ def write_body(self, out: Formatter, cache_adjust: int) -> None:
+ with out.block(""):
+ for var, ieffect in self.input_mapping:
+ out.declare(ieffect, var)
+ for _, oeffect in self.output_mapping:
+ out.declare(oeffect, None)
+ self.instr.write_body(out, dedent=-4, cache_adjust=cache_adjust)
-class SuperInstruction(parser.Super):
+ for var, oeffect in self.output_mapping:
+ out.assign(var, oeffect)
- stack: list[str]
+
+@dataclasses.dataclass
+class SuperOrMacroInstruction:
+ """Common fields for super- and macro instructions."""
+
+ name: str
+ stack: list[StackEffect]
initial_sp: int
final_sp: int
- parts: list[SuperComponent]
- def __init__(self, sup: parser.Super):
- super().__init__(sup.kind, sup.name, sup.ops)
- self.context = sup.context
- def analyze(self, a: "Analyzer") -> None:
- components = self.check_components(a)
- self.stack, self.initial_sp = self.super_macro_analysis(a, components)
- sp = self.initial_sp
- self.parts = []
- for instr in components:
- input_mapping = {}
- for ieffect in reversed(instr.input_effects):
- sp -= 1
- if ieffect.name != UNUSED:
- input_mapping[self.stack[sp]] = ieffect
- output_mapping = {}
- for oeffect in instr.output_effects:
- if oeffect.name != UNUSED:
- output_mapping[self.stack[sp]] = oeffect
- sp += 1
- self.parts.append(SuperComponent(instr, input_mapping, output_mapping))
- self.final_sp = sp
-
- def check_components(self, a: "Analyzer") -> list[Instruction]:
- components: list[Instruction] = []
- if not self.ops:
- a.error(f"{self.kind.capitalize()}-instruction has no operands", self)
- for name in self.ops:
- if name not in a.instrs:
- a.error(f"Unknown instruction {name!r}", self)
- else:
- instr = a.instrs[name]
- if self.kind == "super" and instr.kind != "inst":
- a.error(f"Super-instruction operand {instr.name} must be inst, not op", instr)
- components.append(instr)
- return components
+@dataclasses.dataclass
+class SuperInstruction(SuperOrMacroInstruction):
+ """A super-instruction."""
- def super_macro_analysis(
- self, a: "Analyzer", components: list[Instruction]
- ) -> tuple[list[str], int]:
- """Analyze a super-instruction or macro.
+ super: parser.Super
+ parts: list[Component]
- Print an error if there's a cache effect (which we don't support yet).
- Return the list of variable names and the initial stack pointer.
- """
- lowest = current = highest = 0
- for instr in components:
- if instr.cache_effects:
- a.error(
- f"Super-instruction {self.name!r} has cache effects in {instr.name!r}",
- instr,
- )
- current -= len(instr.input_effects)
- lowest = min(lowest, current)
- current += len(instr.output_effects)
- highest = max(highest, current)
- # At this point, 'current' is the net stack effect,
- # and 'lowest' and 'highest' are the extremes.
- # Note that 'lowest' may be negative.
- stack = [f"_tmp_{i+1}" for i in range(highest - lowest)]
- return stack, -lowest
+@dataclasses.dataclass
+class MacroInstruction(SuperOrMacroInstruction):
+ """A macro instruction."""
+
+ macro: parser.Macro
+ parts: list[Component | parser.CacheEffect]
class Analyzer:
"""Parse input, analyze it, and write to output."""
filename: str
+ output_filename: str
src: str
errors: int = 0
+ def __init__(self, filename: str, output_filename: str):
+ """Read the input file."""
+ self.filename = filename
+ self.output_filename = output_filename
+ with open(filename) as f:
+ self.src = f.read()
+
def error(self, msg: str, node: parser.Node) -> None:
lineno = 0
if context := node.context:
# Use line number of first non-comment in the node
- for token in context.owner.tokens[context.begin : context.end]:
+ for token in context.owner.tokens[context.begin : context.end]:
lineno = token.line
if token.kind != "COMMENT":
break
print(f"{self.filename}:{lineno}: {msg}", file=sys.stderr)
self.errors += 1
- def __init__(self, filename: str):
- """Read the input file."""
- self.filename = filename
- with open(filename) as f:
- self.src = f.read()
-
+ everything: list[parser.InstDef | parser.Super | parser.Macro]
instrs: dict[str, Instruction] # Includes ops
- supers: dict[str, parser.Super] # Includes macros
+ supers: dict[str, parser.Super]
super_instrs: dict[str, SuperInstruction]
+ macros: dict[str, parser.Macro]
+ macro_instrs: dict[str, MacroInstruction]
families: dict[str, parser.Family]
def parse(self) -> None:
- """Parse the source text."""
+ """Parse the source text.
+
+ We only want the parser to see the stuff between the
+ begin and end markers.
+ """
psr = parser.Parser(self.src, filename=self.filename)
# Skip until begin marker
@@ -291,24 +335,42 @@ def parse(self) -> None:
raise psr.make_syntax_error(
f"Couldn't find {BEGIN_MARKER!r} in {psr.filename}"
)
+ start = psr.getpos()
+
+ # Find end marker, then delete everything after it
+ while tkn := psr.next(raw=True):
+ if tkn.text == END_MARKER:
+ break
+ del psr.tokens[psr.getpos() - 1 :]
- # Parse until end marker
+ # Parse from start
+ psr.setpos(start)
+ self.everything = []
self.instrs = {}
self.supers = {}
+ self.macros = {}
self.families = {}
- while (tkn := psr.peek(raw=True)) and tkn.text != END_MARKER:
- if inst := psr.inst_def():
- self.instrs[inst.name] = instr = Instruction(inst)
- elif super := psr.super_def():
- self.supers[super.name] = super
- elif family := psr.family_def():
- self.families[family.name] = family
- else:
- raise psr.make_syntax_error(f"Unexpected token")
+ while thing := psr.definition():
+ match thing:
+ case parser.InstDef(name=name):
+ self.instrs[name] = Instruction(thing)
+ self.everything.append(thing)
+ case parser.Super(name):
+ self.supers[name] = thing
+ self.everything.append(thing)
+ case parser.Macro(name):
+ self.macros[name] = thing
+ self.everything.append(thing)
+ case parser.Family(name):
+ self.families[name] = thing
+ case _:
+ typing.assert_never(thing)
+ if not psr.eof():
+ raise psr.make_syntax_error("Extra stuff at the end")
print(
- f"Read {len(self.instrs)} instructions, "
- f"{len(self.supers)} supers/macros, "
+ f"Read {len(self.instrs)} instructions/ops, "
+ f"{len(self.supers)} supers, {len(self.macros)} macros, "
f"and {len(self.families)} families from {self.filename}",
file=sys.stderr,
)
@@ -321,18 +383,22 @@ def analyze(self) -> None:
self.find_predictions()
self.map_families()
self.check_families()
- self.analyze_supers()
+ self.analyze_supers_and_macros()
def find_predictions(self) -> None:
"""Find the instructions that need PREDICTED() labels."""
for instr in self.instrs.values():
- for target in re.findall(RE_PREDICTED, instr.block.text):
+ targets = set(instr.predictions)
+ for line in instr.block_text:
+ if m := re.match(RE_PREDICTED, line):
+ targets.add(m.group(1))
+ for target in targets:
if target_instr := self.instrs.get(target):
target_instr.predicted = True
else:
self.error(
f"Unknown instruction {target!r} predicted in {instr.name!r}",
- instr, # TODO: Use better location
+ instr.inst, # TODO: Use better location
)
def map_families(self) -> None:
@@ -360,7 +426,9 @@ def check_families(self) -> None:
members = [member for member in family.members if member in self.instrs]
if members != family.members:
unknown = set(family.members) - set(members)
- self.error(f"Family {family.name!r} has unknown members: {unknown}", family)
+ self.error(
+ f"Family {family.name!r} has unknown members: {unknown}", family
+ )
if len(members) < 2:
continue
head = self.instrs[members[0]]
@@ -381,119 +449,247 @@ def check_families(self) -> None:
family,
)
- def analyze_supers(self) -> None:
- """Analyze each super instruction."""
+ def analyze_supers_and_macros(self) -> None:
+ """Analyze each super- and macro instruction."""
self.super_instrs = {}
- for name, sup in self.supers.items():
- dup = SuperInstruction(sup)
- dup.analyze(self)
- self.super_instrs[name] = dup
+ self.macro_instrs = {}
+ for name, super in self.supers.items():
+ self.super_instrs[name] = self.analyze_super(super)
+ for name, macro in self.macros.items():
+ self.macro_instrs[name] = self.analyze_macro(macro)
+
+ def analyze_super(self, super: parser.Super) -> SuperInstruction:
+ components = self.check_super_components(super)
+ stack, initial_sp = self.stack_analysis(components)
+ sp = initial_sp
+ parts: list[Component] = []
+ for instr in components:
+ part, sp = self.analyze_instruction(instr, stack, sp)
+ parts.append(part)
+ final_sp = sp
+ return SuperInstruction(super.name, stack, initial_sp, final_sp, super, parts)
+
+ def analyze_macro(self, macro: parser.Macro) -> MacroInstruction:
+ components = self.check_macro_components(macro)
+ stack, initial_sp = self.stack_analysis(components)
+ sp = initial_sp
+ parts: list[Component | parser.CacheEffect] = []
+ for component in components:
+ match component:
+ case parser.CacheEffect() as ceffect:
+ parts.append(ceffect)
+ case Instruction() as instr:
+ part, sp = self.analyze_instruction(instr, stack, sp)
+ parts.append(part)
+ case _:
+ typing.assert_never(component)
+ final_sp = sp
+ return MacroInstruction(macro.name, stack, initial_sp, final_sp, macro, parts)
+
+ def analyze_instruction(
+ self, instr: Instruction, stack: list[StackEffect], sp: int
+ ) -> tuple[Component, int]:
+ input_mapping: StackEffectMapping = []
+ for ieffect in reversed(instr.input_effects):
+ sp -= 1
+ input_mapping.append((stack[sp], ieffect))
+ output_mapping: StackEffectMapping = []
+ for oeffect in instr.output_effects:
+ output_mapping.append((stack[sp], oeffect))
+ sp += 1
+ return Component(instr, input_mapping, output_mapping), sp
+
+ def check_super_components(self, super: parser.Super) -> list[Instruction]:
+ components: list[Instruction] = []
+ for op in super.ops:
+ if op.name not in self.instrs:
+ self.error(f"Unknown instruction {op.name!r}", super)
+ else:
+ components.append(self.instrs[op.name])
+ return components
+
+ def check_macro_components(
+ self, macro: parser.Macro
+ ) -> list[InstructionOrCacheEffect]:
+ components: list[InstructionOrCacheEffect] = []
+ for uop in macro.uops:
+ match uop:
+ case parser.OpName(name):
+ if name not in self.instrs:
+ self.error(f"Unknown instruction {name!r}", macro)
+ components.append(self.instrs[name])
+ case parser.CacheEffect():
+ components.append(uop)
+ case _:
+ typing.assert_never(uop)
+ return components
+
+ def stack_analysis(
+ self, components: typing.Iterable[InstructionOrCacheEffect]
+ ) -> tuple[list[StackEffect], int]:
+ """Analyze a super-instruction or macro.
+
+ Cache effects are ignored here; they do not consume or produce stack items.
- def write_instructions(self, filename: str) -> None:
+ Return the list of stack-effect temporaries and the initial stack pointer.
+ """
+ lowest = current = highest = 0
+ for thing in components:
+ match thing:
+ case Instruction() as instr:
+ current -= len(instr.input_effects)
+ lowest = min(lowest, current)
+ current += len(instr.output_effects)
+ highest = max(highest, current)
+ case parser.CacheEffect():
+ pass
+ case _:
+ typing.assert_never(thing)
+ # At this point, 'current' is the net stack effect,
+ # and 'lowest' and 'highest' are the extremes.
+ # Note that 'lowest' may be negative.
+ # TODO: Reverse the numbering.
+ stack = [
+ StackEffect(f"_tmp_{i+1}", "") for i in reversed(range(highest - lowest))
+ ]
+ return stack, -lowest
+
+ def write_instructions(self) -> None:
"""Write instructions to output file."""
- indent = " " * 8
- with open(filename, "w") as f:
+ with open(self.output_filename, "w") as f:
# Write provenance header
f.write(f"// This file is generated by {os.path.relpath(__file__)}\n")
f.write(f"// from {os.path.relpath(self.filename)}\n")
f.write(f"// Do not edit!\n")
- # Write regular instructions
+ # Create formatter; the rest of the code uses this.
+ self.out = Formatter(f, 8)
+
+ # Write and count instructions of all kinds
n_instrs = 0
- for name, instr in self.instrs.items():
- if instr.kind != "inst":
- continue # ops are not real instructions
- n_instrs += 1
- f.write(f"\n{indent}TARGET({name}) {{\n")
- if instr.predicted:
- f.write(f"{indent} PREDICTED({name});\n")
- instr.write(f, indent)
- if not always_exits(instr.block):
- f.write(f"{indent} DISPATCH();\n")
- f.write(f"{indent}}}\n")
-
- # Write super-instructions and macros
n_supers = 0
n_macros = 0
- for sup in self.super_instrs.values():
- if sup.kind == "super":
- n_supers += 1
- elif sup.kind == "macro":
- n_macros += 1
- self.write_super_macro(f, sup, indent)
-
- print(
- f"Wrote {n_instrs} instructions, {n_supers} supers, "
- f"and {n_macros} macros to {filename}",
- file=sys.stderr,
- )
+ for thing in self.everything:
+ match thing:
+ case parser.InstDef():
+ if thing.kind == "inst":
+ n_instrs += 1
+ self.write_instr(self.instrs[thing.name])
+ case parser.Super():
+ n_supers += 1
+ self.write_super(self.super_instrs[thing.name])
+ case parser.Macro():
+ n_macros += 1
+ self.write_macro(self.macro_instrs[thing.name])
+ case _:
+ typing.assert_never(thing)
- def write_super_macro(
- self, f: typing.TextIO, sup: SuperInstruction, indent: str = ""
- ) -> None:
+ print(
+ f"Wrote {n_instrs} instructions, {n_supers} supers, "
+ f"and {n_macros} macros to {self.output_filename}",
+ file=sys.stderr,
+ )
- # TODO: Make write() and block() methods of some Formatter class
- def write(arg: str) -> None:
- if arg:
- f.write(f"{indent}{arg}\n")
- else:
- f.write("\n")
+ def write_instr(self, instr: Instruction) -> None:
+ name = instr.name
+ self.out.emit("")
+ with self.out.block(f"TARGET({name})"):
+ if instr.predicted:
+ self.out.emit(f"PREDICTED({name});")
+ instr.write(self.out)
+ if not instr.always_exits:
+ for prediction in instr.predictions:
+ self.out.emit(f"PREDICT({prediction});")
+ self.out.emit(f"DISPATCH();")
+
+ def write_super(self, sup: SuperInstruction) -> None:
+ """Write code for a super-instruction."""
+ with self.wrap_super_or_macro(sup):
+ first = True
+ for comp in sup.parts:
+ if not first:
+ self.out.emit("NEXTOPARG();")
+ self.out.emit("JUMPBY(1);")
+ first = False
+ comp.write_body(self.out, 0)
+ if comp.instr.cache_offset:
+ self.out.emit(f"JUMPBY({comp.instr.cache_offset});")
+
+ def write_macro(self, mac: MacroInstruction) -> None:
+ """Write code for a macro instruction."""
+ with self.wrap_super_or_macro(mac):
+ cache_adjust = 0
+ for part in mac.parts:
+ match part:
+ case parser.CacheEffect(size=size):
+ cache_adjust += size
+ case Component() as comp:
+ comp.write_body(self.out, cache_adjust)
+ cache_adjust += comp.instr.cache_offset
+
+ if cache_adjust:
+ self.out.emit(f"JUMPBY({cache_adjust});")
+
+ @contextlib.contextmanager
+ def wrap_super_or_macro(self, up: SuperOrMacroInstruction):
+ """Shared boilerplate for super- and macro instructions."""
+ # TODO: Somewhere (where?) make it so that if one instruction
+ # has an output that is input to another, and the variable names
+ # and types match and don't conflict with other instructions,
+ # that variable is declared with the right name and type in the
+ # outer block, rather than trusting the compiler to optimize it.
+ self.out.emit("")
+ with self.out.block(f"TARGET({up.name})"):
+ for i, var in reversed(list(enumerate(up.stack))):
+ src = None
+ if i < up.initial_sp:
+ src = StackEffect(f"PEEK({up.initial_sp - i})", "")
+ self.out.declare(var, src)
- @contextlib.contextmanager
- def block(head: str):
- if head:
- write(head + " {")
- else:
- write("{")
- nonlocal indent
- indent += " "
yield
- indent = indent[:-4]
- write("}")
-
- write("")
- with block(f"TARGET({sup.name})"):
- for i, var in enumerate(sup.stack):
- if i < sup.initial_sp:
- write(f"PyObject *{var} = PEEK({sup.initial_sp - i});")
- else:
- write(f"PyObject *{var};")
-
- for i, comp in enumerate(sup.parts):
- if i > 0 and sup.kind == "super":
- write("NEXTOPARG();")
- write("next_instr++;")
-
- with block(""):
- for var, ieffect in comp.input_mapping.items():
- write(f"PyObject *{ieffect.name} = {var};")
- for oeffect in comp.output_mapping.values():
- write(f"PyObject *{oeffect.name};")
- comp.instr.write_body(f, indent, dedent=-4)
- for var, oeffect in comp.output_mapping.items():
- write(f"{var} = {oeffect.name};")
-
- if sup.final_sp > sup.initial_sp:
- write(f"STACK_GROW({sup.final_sp - sup.initial_sp});")
- elif sup.final_sp < sup.initial_sp:
- write(f"STACK_SHRINK({sup.initial_sp - sup.final_sp});")
- for i, var in enumerate(reversed(sup.stack[:sup.final_sp]), 1):
- write(f"POKE({i}, {var});")
- write("DISPATCH();")
-
-
-def always_exits(block: parser.Block) -> bool:
+
+ self.out.stack_adjust(up.final_sp - up.initial_sp)
+ for i, var in enumerate(reversed(up.stack[: up.final_sp]), 1):
+ dst = StackEffect(f"PEEK({i})", "")
+ self.out.assign(dst, var)
+
+ self.out.emit(f"DISPATCH();")
+
+
+def extract_block_text(block: parser.Block) -> tuple[list[str], list[str]]:
+ # Get lines of text with proper dedent
+ blocklines = block.text.splitlines(True)
+
+ # Remove blank lines from both ends
+ while blocklines and not blocklines[0].strip():
+ blocklines.pop(0)
+ while blocklines and not blocklines[-1].strip():
+ blocklines.pop()
+
+ # Remove leading and trailing braces
+ assert blocklines and blocklines[0].strip() == "{"
+ assert blocklines and blocklines[-1].strip() == "}"
+ blocklines.pop()
+ blocklines.pop(0)
+
+ # Remove trailing blank lines
+ while blocklines and not blocklines[-1].strip():
+ blocklines.pop()
+
+ # Separate PREDICT(...) macros from end
+ predictions: list[str] = []
+ while blocklines and (m := re.match(r"^\s*PREDICT\((\w+)\);\s*$", blocklines[-1])):
+ predictions.insert(0, m.group(1))
+ blocklines.pop()
+
+ return blocklines, predictions
+
+
+def always_exits(lines: list[str]) -> bool:
"""Determine whether a block always ends in a return/goto/etc."""
- text = block.text
- lines = text.splitlines()
- while lines and not lines[-1].strip():
- lines.pop()
- if not lines or lines[-1].strip() != "}":
- return False
- lines.pop()
if not lines:
return False
- line = lines.pop().rstrip()
+ line = lines[-1].rstrip()
# Indent must match exactly (TODO: Do something better)
if line[:12] != " " * 12:
return False
@@ -506,13 +702,12 @@ def always_exits(block: parser.Block) -> bool:
def main():
"""Parse command line, parse input, analyze, write output."""
args = arg_parser.parse_args() # Prints message and sys.exit(2) on error
- a = Analyzer(args.input) # Raises OSError if file not found
+ a = Analyzer(args.input, args.output) # Raises OSError if input unreadable
a.parse() # Raises SyntaxError on failure
- a.analyze() # Prints messages and raises SystemExit on failure
+ a.analyze() # Prints messages and sets a.errors on failure
if a.errors:
sys.exit(f"Found {a.errors} errors")
-
- a.write_instructions(args.output) # Raises OSError if file can't be written
+ a.write_instructions() # Raises OSError if output can't be written
if __name__ == "__main__":
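
The stack bookkeeping shared by analyze_super() and analyze_macro() above boils down to tracking the lowest and highest stack depth reached across the component instructions. A small self-contained sketch, simplified to plain input/output counts instead of StackEffect lists (the names here are illustrative only):

def stack_analysis(components):
    # components: (n_inputs, n_outputs) pairs, one per component instruction;
    # cache-effect components would simply contribute (0, 0).
    lowest = current = highest = 0
    for n_in, n_out in components:
        current -= n_in
        lowest = min(lowest, current)
        current += n_out
        highest = max(highest, current)
    # 'current' ends up as the net stack effect; the temporaries must cover
    # every slot ever touched, i.e. highest - lowest of them.
    stack = [f"_tmp_{i+1}" for i in reversed(range(highest - lowest))]
    return stack, -lowest   # temporaries plus the initial stack pointer

# Example: part 1 pops two and pushes one, part 2 pops one and pushes one.
print(stack_analysis([(2, 1), (1, 1)]))   # (['_tmp_2', '_tmp_1'], 2)
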
diff --git a/Tools/cases_generator/lexer.py b/Tools/cases_generator/lexer.py
index 980c920bf357f4..39b6a212a67b1c 100644
--- a/Tools/cases_generator/lexer.py
+++ b/Tools/cases_generator/lexer.py
@@ -240,7 +240,12 @@ def to_text(tkns: list[Token], dedent: int = 0) -> str:
res.append('\n')
col = 1+dedent
res.append(' '*(c-col))
- res.append(tkn.text)
+ text = tkn.text
+ if dedent != 0 and tkn.kind == 'COMMENT' and '\n' in text:
+ if dedent < 0:
+ text = text.replace('\n', '\n' + ' '*-dedent)
+ # TODO: dedent > 0
+ res.append(text)
line, col = tkn.end
return ''.join(res)
diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py
index ae5ef1e26ea1c2..d802c733dfd10c 100644
--- a/Tools/cases_generator/parser.py
+++ b/Tools/cases_generator/parser.py
@@ -9,10 +9,12 @@
P = TypeVar("P", bound="Parser")
N = TypeVar("N", bound="Node")
-def contextual(func: Callable[[P], N|None]) -> Callable[[P], N|None]:
+
+
+def contextual(func: Callable[[P], N | None]) -> Callable[[P], N | None]:
# Decorator to wrap grammar methods.
# Resets position if `func` returns None.
- def contextual_wrapper(self: P) -> N|None:
+ def contextual_wrapper(self: P) -> N | None:
begin = self.getpos()
res = func(self)
if res is None:
@@ -21,6 +23,7 @@ def contextual_wrapper(self: P) -> N|None:
end = self.getpos()
res.context = Context(begin, end, self)
return res
+
return contextual_wrapper
@@ -35,7 +38,7 @@ def __repr__(self):
@dataclass
class Node:
- context: Context|None = field(init=False, default=None)
+ context: Context | None = field(init=False, default=None)
@property
def text(self) -> str:
@@ -59,7 +62,8 @@ class Block(Node):
@dataclass
class StackEffect(Node):
name: str
- # TODO: type, condition
+ type: str = ""
+ # TODO: array, condition
@dataclass
@@ -68,8 +72,14 @@ class CacheEffect(Node):
size: int
+@dataclass
+class OpName(Node):
+ name: str
+
+
InputEffect = StackEffect | CacheEffect
OutputEffect = StackEffect
+UOp = OpName | CacheEffect
@dataclass
@@ -82,32 +92,23 @@ class InstHeader(Node):
@dataclass
class InstDef(Node):
- # TODO: Merge InstHeader and InstDef
- header: InstHeader
+ kind: Literal["inst", "op"]
+ name: str
+ inputs: list[InputEffect]
+ outputs: list[OutputEffect]
block: Block
- @property
- def kind(self) -> str:
- return self.header.kind
-
- @property
- def name(self) -> str:
- return self.header.name
- @property
- def inputs(self) -> list[InputEffect]:
- return self.header.inputs
-
- @property
- def outputs(self) -> list[OutputEffect]:
- return self.header.outputs
+@dataclass
+class Super(Node):
+ name: str
+ ops: list[OpName]
@dataclass
-class Super(Node):
- kind: Literal["macro", "super"]
+class Macro(Node):
name: str
- ops: list[str]
+ uops: list[UOp]
@dataclass
@@ -118,12 +119,22 @@ class Family(Node):
class Parser(PLexer):
+ @contextual
+ def definition(self) -> InstDef | Super | Macro | Family | None:
+ if inst := self.inst_def():
+ return inst
+ if super := self.super_def():
+ return super
+ if macro := self.macro_def():
+ return macro
+ if family := self.family_def():
+ return family
@contextual
def inst_def(self) -> InstDef | None:
- if header := self.inst_header():
+ if hdr := self.inst_header():
if block := self.block():
- return InstDef(header, block)
+ return InstDef(hdr.kind, hdr.name, hdr.inputs, hdr.outputs, block)
raise self.make_syntax_error("Expected block")
return None
@@ -132,24 +143,21 @@ def inst_header(self) -> InstHeader | None:
# inst(NAME)
# | inst(NAME, (inputs -- outputs))
# | op(NAME, (inputs -- outputs))
- # TODO: Error out when there is something unexpected.
# TODO: Make INST a keyword in the lexer.
if (tkn := self.expect(lx.IDENTIFIER)) and (kind := tkn.text) in ("inst", "op"):
- if (self.expect(lx.LPAREN)
- and (tkn := self.expect(lx.IDENTIFIER))):
+ if self.expect(lx.LPAREN) and (tkn := self.expect(lx.IDENTIFIER)):
name = tkn.text
if self.expect(lx.COMMA):
- inp, outp = self.stack_effect()
+ inp, outp = self.io_effect()
if self.expect(lx.RPAREN):
- if ((tkn := self.peek())
- and tkn.kind == lx.LBRACE):
+ if (tkn := self.peek()) and tkn.kind == lx.LBRACE:
return InstHeader(kind, name, inp, outp)
elif self.expect(lx.RPAREN) and kind == "inst":
# No legacy stack effect if kind is "op".
return InstHeader(kind, name, [], [])
return None
- def stack_effect(self) -> tuple[list[InputEffect], list[OutputEffect]]:
+ def io_effect(self) -> tuple[list[InputEffect], list[OutputEffect]]:
# '(' [inputs] '--' [outputs] ')'
if self.expect(lx.LPAREN):
inputs = self.inputs() or []
@@ -174,21 +182,7 @@ def inputs(self) -> list[InputEffect] | None:
@contextual
def input(self) -> InputEffect | None:
- # IDENTIFIER '/' INTEGER (CacheEffect)
- # IDENTIFIER (StackEffect)
- if (tkn := self.expect(lx.IDENTIFIER)):
- if self.expect(lx.DIVIDE):
- if num := self.expect(lx.NUMBER):
- try:
- size = int(num.text)
- except ValueError:
- raise self.make_syntax_error(
- f"Expected integer, got {num.text!r}")
- else:
- return CacheEffect(tkn.text, size)
- raise self.make_syntax_error("Expected integer")
- else:
- return StackEffect(tkn.text)
+ return self.cache_effect() or self.stack_effect()
def outputs(self) -> list[OutputEffect] | None:
# output (, output)*
@@ -205,46 +199,113 @@ def outputs(self) -> list[OutputEffect] | None:
@contextual
def output(self) -> OutputEffect | None:
- if (tkn := self.expect(lx.IDENTIFIER)):
- return StackEffect(tkn.text)
+ return self.stack_effect()
+
+ @contextual
+ def cache_effect(self) -> CacheEffect | None:
+ # IDENTIFIER '/' NUMBER
+ if tkn := self.expect(lx.IDENTIFIER):
+ if self.expect(lx.DIVIDE):
+ num = self.require(lx.NUMBER).text
+ try:
+ size = int(num)
+ except ValueError:
+ raise self.make_syntax_error(f"Expected integer, got {num!r}")
+ else:
+ return CacheEffect(tkn.text, size)
+
+ @contextual
+ def stack_effect(self) -> StackEffect | None:
+ # IDENTIFIER [':' IDENTIFIER]
+ # TODO: Arrays, conditions
+ if tkn := self.expect(lx.IDENTIFIER):
+ type = ""
+ if self.expect(lx.COLON):
+ type = self.require(lx.IDENTIFIER).text
+ return StackEffect(tkn.text, type)
@contextual
def super_def(self) -> Super | None:
- if (tkn := self.expect(lx.IDENTIFIER)) and (kind := tkn.text) in ("super", "macro"):
+ if (tkn := self.expect(lx.IDENTIFIER)) and tkn.text == "super":
if self.expect(lx.LPAREN):
- if (tkn := self.expect(lx.IDENTIFIER)):
+ if tkn := self.expect(lx.IDENTIFIER):
if self.expect(lx.RPAREN):
if self.expect(lx.EQUALS):
if ops := self.ops():
- res = Super(kind, tkn.text, ops)
+ self.require(lx.SEMI)
+ res = Super(tkn.text, ops)
return res
- def ops(self) -> list[str] | None:
- if tkn := self.expect(lx.IDENTIFIER):
- ops = [tkn.text]
+ def ops(self) -> list[OpName] | None:
+ if op := self.op():
+ ops = [op]
while self.expect(lx.PLUS):
- if tkn := self.require(lx.IDENTIFIER):
- ops.append(tkn.text)
- self.require(lx.SEMI)
+ if op := self.op():
+ ops.append(op)
return ops
+ @contextual
+ def op(self) -> OpName | None:
+ if tkn := self.expect(lx.IDENTIFIER):
+ return OpName(tkn.text)
+
+ @contextual
+ def macro_def(self) -> Macro | None:
+ if (tkn := self.expect(lx.IDENTIFIER)) and tkn.text == "macro":
+ if self.expect(lx.LPAREN):
+ if tkn := self.expect(lx.IDENTIFIER):
+ if self.expect(lx.RPAREN):
+ if self.expect(lx.EQUALS):
+ if uops := self.uops():
+ self.require(lx.SEMI)
+ res = Macro(tkn.text, uops)
+ return res
+
+ def uops(self) -> list[UOp] | None:
+ if uop := self.uop():
+ uops = [uop]
+ while self.expect(lx.PLUS):
+ if uop := self.uop():
+ uops.append(uop)
+ else:
+ raise self.make_syntax_error("Expected op name or cache effect")
+ return uops
+
+ @contextual
+ def uop(self) -> UOp | None:
+ if tkn := self.expect(lx.IDENTIFIER):
+ if self.expect(lx.DIVIDE):
+ if num := self.expect(lx.NUMBER):
+ try:
+ size = int(num.text)
+ except ValueError:
+ raise self.make_syntax_error(
+ f"Expected integer, got {num.text!r}"
+ )
+ else:
+ return CacheEffect(tkn.text, size)
+ raise self.make_syntax_error("Expected integer")
+ else:
+ return OpName(tkn.text)
+
@contextual
def family_def(self) -> Family | None:
if (tkn := self.expect(lx.IDENTIFIER)) and tkn.text == "family":
size = None
if self.expect(lx.LPAREN):
- if (tkn := self.expect(lx.IDENTIFIER)):
+ if tkn := self.expect(lx.IDENTIFIER):
if self.expect(lx.COMMA):
if not (size := self.expect(lx.IDENTIFIER)):
- raise self.make_syntax_error(
- "Expected identifier")
+ raise self.make_syntax_error("Expected identifier")
if self.expect(lx.RPAREN):
if self.expect(lx.EQUALS):
if not self.expect(lx.LBRACE):
raise self.make_syntax_error("Expected {")
if members := self.members():
if self.expect(lx.RBRACE) and self.expect(lx.SEMI):
- return Family(tkn.text, size.text if size else "", members)
+ return Family(
+ tkn.text, size.text if size else "", members
+ )
return None
def members(self) -> list[str] | None:
@@ -284,6 +345,7 @@ def c_blob(self) -> list[lx.Token]:
if __name__ == "__main__":
import sys
+
if sys.argv[1:]:
filename = sys.argv[1]
if filename == "-c" and sys.argv[2:]:
@@ -295,10 +357,10 @@ def c_blob(self) -> list[lx.Token]:
srclines = src.splitlines()
begin = srclines.index("// BEGIN BYTECODES //")
end = srclines.index("// END BYTECODES //")
- src = "\n".join(srclines[begin+1 : end])
+ src = "\n".join(srclines[begin + 1 : end])
else:
filename = ""
src = "if (x) { x.foo; // comment\n}"
parser = Parser(src, filename)
- x = parser.inst_def() or parser.super_def() or parser.family_def()
+ x = parser.definition()
print(x)
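
The new definition() entry point above only works because every grammar rule is wrapped in @contextual, which rewinds the token position when a rule returns None so the next alternative starts from the same place. A stripped-down sketch of that pattern with a toy parser (MiniParser and the sample rules are hypothetical, not cases_generator code):

class MiniParser:
    def __init__(self, tokens):
        self.tokens = tokens
        self.pos = 0

    def expect(self, text):
        # Consume the next token only if it matches.
        if self.pos < len(self.tokens) and self.tokens[self.pos] == text:
            self.pos += 1
            return text
        return None

def contextual(func):
    # Rewind the position whenever a rule fails, so the caller can try
    # the next alternative from the same spot.
    def wrapper(parser):
        begin = parser.pos
        res = func(parser)
        if res is None:
            parser.pos = begin
        return res
    return wrapper

@contextual
def super_def(p):
    if p.expect("super") and p.expect("("):
        return "super"

@contextual
def macro_def(p):
    if p.expect("macro") and p.expect("("):
        return "macro"

@contextual
def op_name(p):
    if p.pos < len(p.tokens):
        name = p.tokens[p.pos]
        p.pos += 1
        return f"op:{name}"

def definition(p):
    return super_def(p) or macro_def(p) or op_name(p)

p = MiniParser(["super", "duper"])
print(definition(p))   # 'op:super' -- super_def() consumed 'super', failed on
                       # '(', and was rewound, so op_name() saw the token again
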
diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py
index 0ece814e8f1883..fdf8041e14bbc1 100755
--- a/Tools/clinic/clinic.py
+++ b/Tools/clinic/clinic.py
@@ -5212,10 +5212,6 @@ def state_terminal(self, line):
def main(argv):
import sys
-
- if sys.version_info.major < 3 or sys.version_info.minor < 3:
- sys.exit("Error: clinic.py requires Python 3.3 or greater.")
-
import argparse
cmdline = argparse.ArgumentParser(
description="""Preprocessor for CPython C files.
diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py
index 9c098064fe5403..81b06f9f7469ab 100644
--- a/Tools/scripts/summarize_stats.py
+++ b/Tools/scripts/summarize_stats.py
@@ -32,7 +32,17 @@
opmap = {name: i for i, name in enumerate(opname)}
opmap = dict(sorted(opmap.items()))
-TOTAL = "specialization.deferred", "specialization.hit", "specialization.miss", "execution_count"
+TOTAL = "specialization.hit", "specialization.miss", "execution_count"
+
+def format_ratio(num, den):
+ """
+ Format a ratio as a percentage. When the denominator is 0, returns the empty
+ string.
+ """
+ if den == 0:
+ return ""
+ else:
+ return f"{num/den:.01%}"
def join_rows(a_rows, b_rows):
"""
@@ -80,14 +90,14 @@ def calculate_specialization_stats(family_stats, total):
if key in ("specialization.hit", "specialization.miss"):
label = key[len("specialization."):]
elif key == "execution_count":
- label = "unquickened"
+ continue
elif key in ("specialization.success", "specialization.failure", "specializable"):
continue
elif key.startswith("pair"):
continue
else:
label = key
- rows.append((f"{label:>12}", f"{family_stats[key]:>12}", f"{100*family_stats[key]/total:0.1f}%"))
+ rows.append((f"{label:>12}", f"{family_stats[key]:>12}", format_ratio(family_stats[key], total)))
return rows
def calculate_specialization_success_failure(family_stats):
@@ -100,12 +110,12 @@ def calculate_specialization_success_failure(family_stats):
label = key[len("specialization."):]
label = label[0].upper() + label[1:]
val = family_stats.get(key, 0)
- rows.append((label, val, f"{100*val/total_attempts:0.1f}%"))
+ rows.append((label, val, format_ratio(val, total_attempts)))
return rows
def calculate_specialization_failure_kinds(name, family_stats, defines):
total_failures = family_stats.get("specialization.failure", 0)
- failure_kinds = [ 0 ] * 30
+ failure_kinds = [ 0 ] * 40
for key in family_stats:
if not key.startswith("specialization.failure_kind"):
continue
@@ -118,7 +128,7 @@ def calculate_specialization_failure_kinds(name, family_stats, defines):
for value, index in failures:
if not value:
continue
- rows.append((kind_to_text(index, defines, name), value, f"{100*value/total_failures:0.1f}%"))
+ rows.append((kind_to_text(index, defines, name), value, format_ratio(value, total_failures)))
return rows
def print_specialization_stats(name, family_stats, defines):
@@ -184,6 +194,7 @@ def gather_stats(input):
key = key.strip()
value = int(value)
stats[key] += value
+ stats['__nfiles__'] += 1
return stats
else:
raise ValueError(f"{input:r} is not a file or directory path")
@@ -213,7 +224,7 @@ def pretty(defname):
return defname.replace("_", " ").lower()
def kind_to_text(kind, defines, opname):
- if kind < 7:
+ if kind <= 7:
return pretty(defines[kind][0])
if opname.endswith("ATTR"):
opname = "ATTR"
@@ -230,10 +241,7 @@ def categorized_counts(opcode_stats):
not_specialized = 0
specialized_instructions = {
op for op in opcode._specialized_instructions
- if "__" not in op and "ADAPTIVE" not in op}
- adaptive_instructions = {
- op for op in opcode._specialized_instructions
- if "ADAPTIVE" in op}
+ if "__" not in op}
for i, opcode_stat in enumerate(opcode_stats):
if "execution_count" not in opcode_stat:
continue
@@ -241,8 +249,6 @@ def categorized_counts(opcode_stats):
name = opname[i]
if "specializable" in opcode_stat:
not_specialized += count
- elif name in adaptive_instructions:
- not_specialized += count
elif name in specialized_instructions:
miss = opcode_stat.get("specialization.miss", 0)
not_specialized += miss
@@ -317,11 +323,11 @@ def calculate_execution_counts(opcode_stats, total):
for (count, name, miss) in counts:
cumulative += count
if miss:
- miss = f"{100*miss/count:0.1f}%"
+ miss = format_ratio(miss, count)
else:
miss = ""
- rows.append((name, count, f"{100*count/total:0.1f}%",
- f"{100*cumulative/total:0.1f}%", miss))
+ rows.append((name, count, format_ratio(count, total),
+ format_ratio(cumulative, total), miss))
return rows
def emit_execution_counts(opcode_stats, total):
@@ -385,9 +391,9 @@ def emit_comparative_specialization_stats(base_opcode_stats, head_opcode_stats):
def calculate_specialization_effectiveness(opcode_stats, total):
basic, not_specialized, specialized = categorized_counts(opcode_stats)
return [
- ("Basic", basic, f"{basic*100/total:0.1f}%"),
- ("Not specialized", not_specialized, f"{not_specialized*100/total:0.1f}%"),
- ("Specialized", specialized, f"{specialized*100/total:0.1f}%"),
+ ("Basic", basic, format_ratio(basic, total)),
+ ("Not specialized", not_specialized, format_ratio(not_specialized, total)),
+ ("Specialized", specialized, format_ratio(specialized, total)),
]
def emit_specialization_overview(opcode_stats, total):
@@ -404,7 +410,7 @@ def emit_specialization_overview(opcode_stats, total):
counts.sort(reverse=True)
if total:
with Section(f"{title} by instruction", 3):
- rows = [ (name, count, f"{100*count/total:0.1f}%") for (count, name) in counts[:10] ]
+ rows = [ (name, count, format_ratio(count, total)) for (count, name) in counts[:10] ]
emit_table(("Name", "Count:", "Ratio:"), rows)
def emit_comparative_specialization_overview(base_opcode_stats, base_total, head_opcode_stats, head_total):
@@ -431,15 +437,15 @@ def calculate_call_stats(stats):
rows = []
for key, value in stats.items():
if "Calls to" in key:
- rows.append((key, value, f"{100*value/total:0.1f}%"))
+ rows.append((key, value, format_ratio(value, total)))
elif key.startswith("Calls "):
name, index = key[:-1].split("[")
index = int(index)
label = name + " (" + pretty(defines[index][0]) + ")"
- rows.append((label, value, f"{100*value/total:0.1f}%"))
+ rows.append((label, value, format_ratio(value, total)))
for key, value in stats.items():
if key.startswith("Frame"):
- rows.append((key, value, f"{100*value/total:0.1f}%"))
+ rows.append((key, value, format_ratio(value, total)))
return rows
def emit_call_stats(stats):
@@ -467,13 +473,13 @@ def calculate_object_stats(stats):
for key, value in stats.items():
if key.startswith("Object"):
if "materialize" in key:
- ratio = f"{100*value/total_materializations:0.1f}%"
+ ratio = format_ratio(value, total_materializations)
elif "allocations" in key:
- ratio = f"{100*value/total_allocations:0.1f}%"
+ ratio = format_ratio(value, total_allocations)
elif "increfs" in key:
- ratio = f"{100*value/total_increfs:0.1f}%"
+ ratio = format_ratio(value, total_increfs)
elif "decrefs" in key:
- ratio = f"{100*value/total_decrefs:0.1f}%"
+ ratio = format_ratio(value, total_decrefs)
else:
ratio = ""
label = key[6:].strip()
@@ -516,8 +522,8 @@ def emit_pair_counts(opcode_stats, total):
for (count, pair) in itertools.islice(pair_counts, 100):
i, j = pair
cumulative += count
- rows.append((opname[i] + " " + opname[j], count, f"{100*count/total:0.1f}%",
- f"{100*cumulative/total:0.1f}%"))
+ rows.append((opname[i] + " " + opname[j], count, format_ratio(count, total),
+ format_ratio(cumulative, total)))
emit_table(("Pair", "Count:", "Self:", "Cumulative:"),
rows
)
@@ -561,6 +567,9 @@ def output_single_stats(stats):
emit_specialization_overview(opcode_stats, total)
emit_call_stats(stats)
emit_object_stats(stats)
+ with Section("Meta stats", summary="Meta statistics"):
+ emit_table(("", "Count:"), [('Number of data files', stats['__nfiles__'])])
+
def output_comparative_stats(base_stats, head_stats):
base_opcode_stats = extract_opcode_stats(base_stats)
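
For illustration (not part of the patch): the summarize_stats.py changes above route every percentage column through the new format_ratio() helper, so a zero denominator now renders as an empty cell instead of raising ZeroDivisionError. A minimal usage sketch with made-up counts:

# Copy of the format_ratio() helper added above, exercised on invented numbers.
def format_ratio(num, den):
    if den == 0:
        return ""
    return f"{num/den:.01%}"


sample_rows = [
    ("hit", 750, format_ratio(750, 1000)),   # '75.0%'
    ("miss", 250, format_ratio(250, 1000)),  # '25.0%'
    ("hit", 0, format_ratio(0, 0)),          # ''  (empty cell, no ZeroDivisionError)
]
for row in sample_rows:
    print(row)
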
diff --git a/netlify.toml b/netlify.toml
new file mode 100644
index 00000000000000..f5790fc5fec74f
--- /dev/null
+++ b/netlify.toml
@@ -0,0 +1,11 @@
+[build]
+ base = "Doc/"
+ command = "make html"
+ publish = "build/html"
+ # Do not trigger netlify builds if docs were not changed.
+ # Changed files should be in sync with `.github/workflows/doc.yml`
+ ignore = "git diff --quiet $CACHED_COMMIT_REF $COMMIT_REF . ../netlify.toml"
+
+[build.environment]
+ PYTHON_VERSION = "3.8"
+ IS_DEPLOYMENT_PREVIEW = "true"
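
For illustration (not part of the patch): the ignore command in netlify.toml skips the Netlify build when git diff --quiet exits with status 0, i.e. when nothing under the watched paths changed between the cached and current commits; a non-zero exit lets the build proceed. A rough Python equivalent of that check, assuming it runs from the repository root rather than from the Doc/ base directory used by Netlify:

# Rough sketch of the ignore check above; in netlify.toml the paths are
# "." and "../netlify.toml" because the command runs inside Doc/.
import os
import subprocess


def should_skip_build() -> bool:
    cached = os.environ["CACHED_COMMIT_REF"]
    current = os.environ["COMMIT_REF"]
    result = subprocess.run(
        ["git", "diff", "--quiet", cached, current, "--", "Doc/", "netlify.toml"]
    )
    # git diff --quiet exits 0 when nothing changed; Netlify skips the build
    # when the ignore command succeeds.
    return result.returncode == 0
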