diff -Nru python3.11-3.11.8/.editorconfig python3.11-3.11.9/.editorconfig --- python3.11-3.11.8/.editorconfig 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/.editorconfig 2024-04-02 08:25:04.000000000 +0000 @@ -1,6 +1,6 @@ root = true -[*.{py,c,cpp,h,rst,md,yml}] +[*.{py,c,cpp,h,js,rst,md,yml}] trim_trailing_whitespace = true insert_final_newline = true indent_style = space @@ -11,5 +11,5 @@ [*.rst] indent_size = 3 -[*.yml] +[*.{js,yml}] indent_size = 2 diff -Nru python3.11-3.11.8/.pre-commit-config.yaml python3.11-3.11.9/.pre-commit-config.yaml --- python3.11-3.11.8/.pre-commit-config.yaml 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/.pre-commit-config.yaml 2024-04-02 08:25:04.000000000 +0000 @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.2.0 + rev: v0.3.4 hooks: - id: ruff name: Run Ruff on Lib/test/ @@ -10,6 +10,8 @@ - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: + - id: check-case-conflict + - id: check-merge-conflict - id: check-toml exclude: ^Lib/test/test_tomllib/ - id: check-yaml diff -Nru python3.11-3.11.8/Doc/Makefile python3.11-3.11.9/Doc/Makefile --- python3.11-3.11.8/Doc/Makefile 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/Makefile 2024-04-02 08:25:04.000000000 +0000 @@ -177,6 +177,7 @@ echo "venv already exists."; \ echo "To recreate it, remove it first with \`make clean-venv'."; \ else \ + echo "Creating venv in $(VENVDIR)"; \ $(PYTHON) -m venv $(VENVDIR); \ $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \ $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \ diff -Nru python3.11-3.11.8/Doc/c-api/buffer.rst python3.11-3.11.9/Doc/c-api/buffer.rst --- python3.11-3.11.8/Doc/c-api/buffer.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/buffer.rst 2024-04-02 08:25:04.000000000 +0000 @@ -29,7 +29,7 @@ Python provides such a facility at the C level in the form of the :ref:`buffer protocol `. This protocol has two sides: -.. index:: single: PyBufferProcs +.. index:: single: PyBufferProcs (C type) - on the producer side, a type can export a "buffer interface" which allows objects of that type to expose information about their underlying buffer. diff -Nru python3.11-3.11.8/Doc/c-api/code.rst python3.11-3.11.9/Doc/c-api/code.rst --- python3.11-3.11.8/Doc/c-api/code.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/code.rst 2024-04-02 08:25:04.000000000 +0000 @@ -22,12 +22,13 @@ .. c:var:: PyTypeObject PyCode_Type This is an instance of :c:type:`PyTypeObject` representing the Python - :class:`code` type. + :ref:`code object `. .. c:function:: int PyCode_Check(PyObject *co) - Return true if *co* is a :class:`code` object. This function always succeeds. + Return true if *co* is a :ref:`code object `. + This function always succeeds. .. c:function:: int PyCode_GetNumFree(PyCodeObject *co) diff -Nru python3.11-3.11.8/Doc/c-api/contextvars.rst python3.11-3.11.9/Doc/c-api/contextvars.rst --- python3.11-3.11.8/Doc/c-api/contextvars.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/contextvars.rst 2024-04-02 08:25:04.000000000 +0000 @@ -6,6 +6,8 @@ ------------------------- .. _contextvarsobjects_pointertype_change: +.. versionadded:: 3.7 + .. versionchanged:: 3.7.1 .. note:: @@ -24,8 +26,6 @@ See :issue:`34762` for more details. -.. versionadded:: 3.7 - This section details the public C API for the :mod:`contextvars` module. .. 
c:type:: PyContext diff -Nru python3.11-3.11.8/Doc/c-api/exceptions.rst python3.11-3.11.9/Doc/c-api/exceptions.rst --- python3.11-3.11.8/Doc/c-api/exceptions.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/exceptions.rst 2024-04-02 08:25:04.000000000 +0000 @@ -148,7 +148,7 @@ .. c:function:: PyObject* PyErr_SetFromErrno(PyObject *type) - .. index:: single: strerror() + .. index:: single: strerror (C function) This is a convenience function to raise an exception when a C library function has returned an error and set the C variable :c:data:`errno`. It constructs a @@ -364,7 +364,7 @@ .. c:function:: int PyErr_ResourceWarning(PyObject *source, Py_ssize_t stack_level, const char *format, ...) Function similar to :c:func:`PyErr_WarnFormat`, but *category* is - :exc:`ResourceWarning` and it passes *source* to :func:`warnings.WarningMessage`. + :exc:`ResourceWarning` and it passes *source* to :class:`!warnings.WarningMessage`. .. versionadded:: 3.6 @@ -550,7 +550,7 @@ .. index:: pair: module; signal - single: SIGINT + single: SIGINT (C macro) single: KeyboardInterrupt (built-in exception) This function interacts with Python's signal handling. @@ -581,7 +581,7 @@ .. index:: pair: module; signal - single: SIGINT + single: SIGINT (C macro) single: KeyboardInterrupt (built-in exception) Simulate the effect of a :c:macro:`!SIGINT` signal arriving. @@ -647,7 +647,7 @@ This creates a class object derived from :exc:`Exception` (accessible in C as :c:data:`PyExc_Exception`). - The :attr:`__module__` attribute of the new class is set to the first part (up + The :attr:`!__module__` attribute of the new class is set to the first part (up to the last dot) of the *name* argument, and the class name is set to the last part (after the last dot). The *base* argument can be used to specify alternate base classes; it can either be only one class or a tuple of classes. The *dict* @@ -797,8 +797,8 @@ Marks a point where a recursive C-level call is about to be performed. - If :c:macro:`USE_STACKCHECK` is defined, this function checks if the OS - stack overflowed using :c:func:`PyOS_CheckStack`. In this is the case, it + If :c:macro:`!USE_STACKCHECK` is defined, this function checks if the OS + stack overflowed using :c:func:`PyOS_CheckStack`. If this is the case, it sets a :exc:`MemoryError` and returns a nonzero value. The function then checks if the recursion limit is reached. If this is the @@ -861,59 +861,59 @@ the variables: .. 
index:: - single: PyExc_BaseException - single: PyExc_Exception - single: PyExc_ArithmeticError - single: PyExc_AssertionError - single: PyExc_AttributeError - single: PyExc_BlockingIOError - single: PyExc_BrokenPipeError - single: PyExc_BufferError - single: PyExc_ChildProcessError - single: PyExc_ConnectionAbortedError - single: PyExc_ConnectionError - single: PyExc_ConnectionRefusedError - single: PyExc_ConnectionResetError - single: PyExc_EOFError - single: PyExc_FileExistsError - single: PyExc_FileNotFoundError - single: PyExc_FloatingPointError - single: PyExc_GeneratorExit - single: PyExc_ImportError - single: PyExc_IndentationError - single: PyExc_IndexError - single: PyExc_InterruptedError - single: PyExc_IsADirectoryError - single: PyExc_KeyError - single: PyExc_KeyboardInterrupt - single: PyExc_LookupError - single: PyExc_MemoryError - single: PyExc_ModuleNotFoundError - single: PyExc_NameError - single: PyExc_NotADirectoryError - single: PyExc_NotImplementedError - single: PyExc_OSError - single: PyExc_OverflowError - single: PyExc_PermissionError - single: PyExc_ProcessLookupError - single: PyExc_RecursionError - single: PyExc_ReferenceError - single: PyExc_RuntimeError - single: PyExc_StopAsyncIteration - single: PyExc_StopIteration - single: PyExc_SyntaxError - single: PyExc_SystemError - single: PyExc_SystemExit - single: PyExc_TabError - single: PyExc_TimeoutError - single: PyExc_TypeError - single: PyExc_UnboundLocalError - single: PyExc_UnicodeDecodeError - single: PyExc_UnicodeEncodeError - single: PyExc_UnicodeError - single: PyExc_UnicodeTranslateError - single: PyExc_ValueError - single: PyExc_ZeroDivisionError + single: PyExc_BaseException (C var) + single: PyExc_Exception (C var) + single: PyExc_ArithmeticError (C var) + single: PyExc_AssertionError (C var) + single: PyExc_AttributeError (C var) + single: PyExc_BlockingIOError (C var) + single: PyExc_BrokenPipeError (C var) + single: PyExc_BufferError (C var) + single: PyExc_ChildProcessError (C var) + single: PyExc_ConnectionAbortedError (C var) + single: PyExc_ConnectionError (C var) + single: PyExc_ConnectionRefusedError (C var) + single: PyExc_ConnectionResetError (C var) + single: PyExc_EOFError (C var) + single: PyExc_FileExistsError (C var) + single: PyExc_FileNotFoundError (C var) + single: PyExc_FloatingPointError (C var) + single: PyExc_GeneratorExit (C var) + single: PyExc_ImportError (C var) + single: PyExc_IndentationError (C var) + single: PyExc_IndexError (C var) + single: PyExc_InterruptedError (C var) + single: PyExc_IsADirectoryError (C var) + single: PyExc_KeyError (C var) + single: PyExc_KeyboardInterrupt (C var) + single: PyExc_LookupError (C var) + single: PyExc_MemoryError (C var) + single: PyExc_ModuleNotFoundError (C var) + single: PyExc_NameError (C var) + single: PyExc_NotADirectoryError (C var) + single: PyExc_NotImplementedError (C var) + single: PyExc_OSError (C var) + single: PyExc_OverflowError (C var) + single: PyExc_PermissionError (C var) + single: PyExc_ProcessLookupError (C var) + single: PyExc_RecursionError (C var) + single: PyExc_ReferenceError (C var) + single: PyExc_RuntimeError (C var) + single: PyExc_StopAsyncIteration (C var) + single: PyExc_StopIteration (C var) + single: PyExc_SyntaxError (C var) + single: PyExc_SystemError (C var) + single: PyExc_SystemExit (C var) + single: PyExc_TabError (C var) + single: PyExc_TimeoutError (C var) + single: PyExc_TypeError (C var) + single: PyExc_UnboundLocalError (C var) + single: PyExc_UnicodeDecodeError (C var) + single: 
PyExc_UnicodeEncodeError (C var) + single: PyExc_UnicodeError (C var) + single: PyExc_UnicodeTranslateError (C var) + single: PyExc_ValueError (C var) + single: PyExc_ZeroDivisionError (C var) +-----------------------------------------+---------------------------------+----------+ | C Name | Python Name | Notes | @@ -1044,18 +1044,18 @@ These are compatibility aliases to :c:data:`PyExc_OSError`: .. index:: - single: PyExc_EnvironmentError - single: PyExc_IOError - single: PyExc_WindowsError + single: PyExc_EnvironmentError (C var) + single: PyExc_IOError (C var) + single: PyExc_WindowsError (C var) +-------------------------------------+----------+ | C Name | Notes | +=====================================+==========+ -| :c:data:`PyExc_EnvironmentError` | | +| :c:data:`!PyExc_EnvironmentError` | | +-------------------------------------+----------+ -| :c:data:`PyExc_IOError` | | +| :c:data:`!PyExc_IOError` | | +-------------------------------------+----------+ -| :c:data:`PyExc_WindowsError` | [2]_ | +| :c:data:`!PyExc_WindowsError` | [2]_ | +-------------------------------------+----------+ .. versionchanged:: 3.3 @@ -1081,17 +1081,17 @@ the variables: .. index:: - single: PyExc_Warning - single: PyExc_BytesWarning - single: PyExc_DeprecationWarning - single: PyExc_FutureWarning - single: PyExc_ImportWarning - single: PyExc_PendingDeprecationWarning - single: PyExc_ResourceWarning - single: PyExc_RuntimeWarning - single: PyExc_SyntaxWarning - single: PyExc_UnicodeWarning - single: PyExc_UserWarning + single: PyExc_Warning (C var) + single: PyExc_BytesWarning (C var) + single: PyExc_DeprecationWarning (C var) + single: PyExc_FutureWarning (C var) + single: PyExc_ImportWarning (C var) + single: PyExc_PendingDeprecationWarning (C var) + single: PyExc_ResourceWarning (C var) + single: PyExc_RuntimeWarning (C var) + single: PyExc_SyntaxWarning (C var) + single: PyExc_UnicodeWarning (C var) + single: PyExc_UserWarning (C var) +------------------------------------------+---------------------------------+----------+ | C Name | Python Name | Notes | diff -Nru python3.11-3.11.8/Doc/c-api/file.rst python3.11-3.11.9/Doc/c-api/file.rst --- python3.11-3.11.8/Doc/c-api/file.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/file.rst 2024-04-02 08:25:04.000000000 +0000 @@ -96,7 +96,7 @@ .. c:function:: int PyFile_WriteObject(PyObject *obj, PyObject *p, int flags) - .. index:: single: Py_PRINT_RAW + .. index:: single: Py_PRINT_RAW (C macro) Write object *obj* to file object *p*. The only supported flag for *flags* is :c:macro:`Py_PRINT_RAW`; if given, the :func:`str` of the object is written diff -Nru python3.11-3.11.8/Doc/c-api/gcsupport.rst python3.11-3.11.9/Doc/c-api/gcsupport.rst --- python3.11-3.11.8/Doc/c-api/gcsupport.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/gcsupport.rst 2024-04-02 08:25:04.000000000 +0000 @@ -64,10 +64,15 @@ :c:macro:`Py_TPFLAGS_HAVE_GC` flag set. -.. c:function:: TYPE* PyObject_GC_Resize(TYPE, PyVarObject *op, Py_ssize_t newsize) +.. c:macro:: PyObject_GC_Resize(TYPE, op, newsize) - Resize an object allocated by :c:macro:`PyObject_NewVar`. Returns the - resized object or ``NULL`` on failure. *op* must not be tracked by the collector yet. + Resize an object allocated by :c:macro:`PyObject_NewVar`. + Returns the resized object of type ``TYPE*`` (refers to any C type) + or ``NULL`` on failure. + + *op* must be of type :c:expr:`PyVarObject *` + and must not be tracked by the collector yet. 
+ *newsize* must be of type :c:type:`Py_ssize_t`. .. c:function:: void PyObject_GC_Track(PyObject *op) diff -Nru python3.11-3.11.8/Doc/c-api/hash.rst python3.11-3.11.9/Doc/c-api/hash.rst --- python3.11-3.11.8/Doc/c-api/hash.rst 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/hash.rst 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,51 @@ +.. highlight:: c + +PyHash API +---------- + +See also the :c:member:`PyTypeObject.tp_hash` member. + +.. c:type:: Py_hash_t + + Hash value type: signed integer. + + .. versionadded:: 3.2 + +.. c:type:: Py_uhash_t + + Hash value type: unsigned integer. + + .. versionadded:: 3.2 + + +.. c:type:: PyHash_FuncDef + + Hash function definition used by :c:func:`PyHash_GetFuncDef`. + + .. c::member:: Py_hash_t (*const hash)(const void *, Py_ssize_t) + + Hash function. + + .. c:member:: const char *name + + Hash function name (UTF-8 encoded string). + + .. c:member:: const int hash_bits + + Internal size of the hash value in bits. + + .. c:member:: const int seed_bits + + Size of seed input in bits. + + .. versionadded:: 3.4 + + +.. c:function:: PyHash_FuncDef* PyHash_GetFuncDef(void) + + Get the hash function definition. + + .. seealso:: + :pep:`456` "Secure and interchangeable hash algorithm". + + .. versionadded:: 3.4 diff -Nru python3.11-3.11.8/Doc/c-api/import.rst python3.11-3.11.9/Doc/c-api/import.rst --- python3.11-3.11.8/Doc/c-api/import.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/import.rst 2024-04-02 08:25:04.000000000 +0000 @@ -281,7 +281,7 @@ The module name, as an ASCII encoded string. - .. c: member:: PyObject* (*initfunc)(void) + .. c:member:: PyObject* (*initfunc)(void) Initialization function for a module built into the interpreter. diff -Nru python3.11-3.11.8/Doc/c-api/init.rst python3.11-3.11.9/Doc/c-api/init.rst --- python3.11-3.11.8/Doc/c-api/init.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/init.rst 2024-04-02 08:25:04.000000000 +0000 @@ -237,9 +237,9 @@ pair: module; __main__ pair: module; sys triple: module; search; path - single: PySys_SetArgv() - single: PySys_SetArgvEx() - single: Py_FinalizeEx() + single: PySys_SetArgv (C function) + single: PySys_SetArgvEx (C function) + single: Py_FinalizeEx (C function) Initialize the Python interpreter. In an application embedding Python, this should be called before using any other Python/C API functions; see @@ -740,7 +740,7 @@ two threads simultaneously increment the reference count of the same object, the reference count could end up being incremented only once instead of twice. -.. index:: single: setswitchinterval() (in module sys) +.. index:: single: setswitchinterval (in module sys) Therefore, the rule exists that only the thread that has acquired the :term:`GIL` may operate on Python objects or call Python/C API functions. @@ -750,8 +750,7 @@ a file, so that other Python threads can run in the meantime. .. index:: - single: PyThreadState - single: PyThreadState + single: PyThreadState (C type) The Python interpreter keeps some thread-specific bookkeeping information inside a data structure called :c:type:`PyThreadState`. There's also one @@ -777,8 +776,8 @@ Py_END_ALLOW_THREADS .. 
index:: - single: Py_BEGIN_ALLOW_THREADS - single: Py_END_ALLOW_THREADS + single: Py_BEGIN_ALLOW_THREADS (C macro) + single: Py_END_ALLOW_THREADS (C macro) The :c:macro:`Py_BEGIN_ALLOW_THREADS` macro opens a new block and declares a hidden local variable; the :c:macro:`Py_END_ALLOW_THREADS` macro closes the @@ -793,8 +792,8 @@ PyEval_RestoreThread(_save); .. index:: - single: PyEval_RestoreThread() - single: PyEval_SaveThread() + single: PyEval_RestoreThread (C function) + single: PyEval_SaveThread (C function) Here is how these functions work: the global interpreter lock is used to protect the pointer to the current thread state. When releasing the lock and saving the thread state, @@ -1417,8 +1416,8 @@ entry.) .. index:: - single: Py_FinalizeEx() - single: Py_Initialize() + single: Py_FinalizeEx (C function) + single: Py_Initialize (C function) Extension modules are shared between (sub-)interpreters as follows: @@ -1446,12 +1445,12 @@ As with multi-phase initialization, this means that only C-level static and global variables are shared between these modules. - .. index:: single: close() (in module os) + .. index:: single: close (in module os) .. c:function:: void Py_EndInterpreter(PyThreadState *tstate) - .. index:: single: Py_FinalizeEx() + .. index:: single: Py_FinalizeEx (C function) Destroy the (sub-)interpreter represented by the given thread state. The given thread state must be the current thread state. See the discussion of thread @@ -1501,8 +1500,6 @@ .. c:function:: int Py_AddPendingCall(int (*func)(void *), void *arg) - .. index:: single: Py_AddPendingCall() - Schedule a function to be called from the main interpreter thread. On success, ``0`` is returned and *func* is queued for being called in the main thread. On failure, ``-1`` is returned without setting any exception. @@ -1536,14 +1533,14 @@ function is generally **not** suitable for calling Python code from arbitrary C threads. Instead, use the :ref:`PyGILState API`. + .. versionadded:: 3.1 + .. versionchanged:: 3.9 If this function is called in a subinterpreter, the function *func* is now scheduled to be called from the subinterpreter, rather than being called from the main interpreter. Each subinterpreter now has its own list of scheduled calls. - .. versionadded:: 3.1 - .. _profiling: Profiling and Tracing diff -Nru python3.11-3.11.8/Doc/c-api/intro.rst python3.11-3.11.9/Doc/c-api/intro.rst --- python3.11-3.11.8/Doc/c-api/intro.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/intro.rst 2024-04-02 08:25:04.000000000 +0000 @@ -148,7 +148,7 @@ worse performances (due to increased code size for example). The compiler is usually smarter than the developer for the cost/benefit analysis. - If Python is :ref:`built in debug mode ` (if the ``Py_DEBUG`` + If Python is :ref:`built in debug mode ` (if the :c:macro:`Py_DEBUG` macro is defined), the :c:macro:`Py_ALWAYS_INLINE` macro does nothing. It must be specified before the function return type. Usage:: @@ -325,8 +325,8 @@ is "don't do that.") .. index:: - single: Py_INCREF() - single: Py_DECREF() + single: Py_INCREF (C function) + single: Py_DECREF (C function) Reference counts are always manipulated explicitly. The normal way is to use the macro :c:func:`Py_INCREF` to take a new reference to an @@ -401,8 +401,8 @@ responsible for it any longer. .. 
index:: - single: PyList_SetItem() - single: PyTuple_SetItem() + single: PyList_SetItem (C function) + single: PyTuple_SetItem (C function) Few functions steal references; the two notable exceptions are :c:func:`PyList_SetItem` and :c:func:`PyTuple_SetItem`, which steal a reference @@ -491,8 +491,8 @@ arguments), you do own a reference to the returned object. .. index:: - single: PyList_GetItem() - single: PySequence_GetItem() + single: PyList_GetItem (C function) + single: PySequence_GetItem (C function) Here is an example of how you could write a function that computes the sum of the items in a list of integers; once using :c:func:`PyList_GetItem`, and once @@ -587,7 +587,7 @@ interpreter, where they are reported to the user accompanied by a stack traceback. -.. index:: single: PyErr_Occurred() +.. index:: single: PyErr_Occurred (C function) For C programmers, however, error checking always has to be explicit. All functions in the Python/C API can raise exceptions, unless an explicit claim is @@ -601,8 +601,8 @@ :c:func:`PyErr_Occurred`. These exceptions are always explicitly documented. .. index:: - single: PyErr_SetString() - single: PyErr_Clear() + single: PyErr_SetString (C function) + single: PyErr_Clear (C function) Exception state is maintained in per-thread storage (this is equivalent to using global storage in an unthreaded application). A thread can be in one of @@ -624,7 +624,7 @@ bytecode interpreter's main loop, which takes care of transferring it to ``sys.exc_info()`` and friends. -.. index:: single: exc_info() (in module sys) +.. index:: single: exc_info (in module sys) Note that starting with Python 1.5, the preferred, thread-safe way to access the exception state from Python code is to call the function :func:`sys.exc_info`, @@ -709,9 +709,9 @@ .. index:: single: incr_item() .. index:: - single: PyErr_ExceptionMatches() - single: PyErr_Clear() - single: Py_XDECREF() + single: PyErr_ExceptionMatches (C function) + single: PyErr_Clear (C function) + single: Py_XDECREF (C function) This example represents an endorsed use of the ``goto`` statement in C! It illustrates the use of :c:func:`PyErr_ExceptionMatches` and @@ -735,7 +735,7 @@ interpreter can only be used after the interpreter has been initialized. .. index:: - single: Py_Initialize() + single: Py_Initialize (C function) pair: module; builtins pair: module; __main__ pair: module; sys @@ -770,11 +770,11 @@ front of the standard path by setting :envvar:`PYTHONPATH`. .. index:: - single: Py_SetProgramName() - single: Py_GetPath() - single: Py_GetPrefix() - single: Py_GetExecPrefix() - single: Py_GetProgramFullPath() + single: Py_SetProgramName (C function) + single: Py_GetPath (C function) + single: Py_GetPrefix (C function) + single: Py_GetExecPrefix (C function) + single: Py_GetProgramFullPath (C function) The embedding application can steer the search by calling ``Py_SetProgramName(file)`` *before* calling :c:func:`Py_Initialize`. Note that @@ -784,7 +784,7 @@ :c:func:`Py_GetPrefix`, :c:func:`Py_GetExecPrefix`, and :c:func:`Py_GetProgramFullPath` (all defined in :file:`Modules/getpath.c`). -.. index:: single: Py_IsInitialized() +.. index:: single: Py_IsInitialized (C function) Sometimes, it is desirable to "uninitialize" Python. For instance, the application may want to start over (make another call to @@ -812,12 +812,14 @@ allocator, or low-level profiling of the main interpreter loop. Only the most frequently used builds will be described in the remainder of this section. 
-Compiling the interpreter with the :c:macro:`Py_DEBUG` macro defined produces +.. c:macro:: Py_DEBUG + +Compiling the interpreter with the :c:macro:`!Py_DEBUG` macro defined produces what is generally meant by :ref:`a debug build of Python `. -:c:macro:`Py_DEBUG` is enabled in the Unix build by adding +:c:macro:`!Py_DEBUG` is enabled in the Unix build by adding :option:`--with-pydebug` to the :file:`./configure` command. It is also implied by the presence of the -not-Python-specific :c:macro:`_DEBUG` macro. When :c:macro:`Py_DEBUG` is enabled +not-Python-specific :c:macro:`!_DEBUG` macro. When :c:macro:`!Py_DEBUG` is enabled in the Unix build, compiler optimization is disabled. In addition to the reference count debugging described below, extra checks are @@ -832,4 +834,3 @@ Please refer to :file:`Misc/SpecialBuilds.txt` in the Python source distribution for more detailed information. - diff -Nru python3.11-3.11.8/Doc/c-api/long.rst python3.11-3.11.9/Doc/c-api/long.rst --- python3.11-3.11.8/Doc/c-api/long.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/long.rst 2024-04-02 08:25:04.000000000 +0000 @@ -116,7 +116,7 @@ .. c:function:: long PyLong_AsLong(PyObject *obj) .. index:: - single: LONG_MAX + single: LONG_MAX (C macro) single: OverflowError (built-in exception) Return a C :c:expr:`long` representation of *obj*. If *obj* is not an @@ -201,7 +201,7 @@ .. c:function:: Py_ssize_t PyLong_AsSsize_t(PyObject *pylong) .. index:: - single: PY_SSIZE_T_MAX + single: PY_SSIZE_T_MAX (C macro) single: OverflowError (built-in exception) Return a C :c:type:`Py_ssize_t` representation of *pylong*. *pylong* must @@ -216,7 +216,7 @@ .. c:function:: unsigned long PyLong_AsUnsignedLong(PyObject *pylong) .. index:: - single: ULONG_MAX + single: ULONG_MAX (C macro) single: OverflowError (built-in exception) Return a C :c:expr:`unsigned long` representation of *pylong*. *pylong* @@ -232,7 +232,7 @@ .. c:function:: size_t PyLong_AsSize_t(PyObject *pylong) .. index:: - single: SIZE_MAX + single: SIZE_MAX (C macro) single: OverflowError (built-in exception) Return a C :c:type:`size_t` representation of *pylong*. *pylong* must be diff -Nru python3.11-3.11.8/Doc/c-api/memory.rst python3.11-3.11.9/Doc/c-api/memory.rst --- python3.11-3.11.8/Doc/c-api/memory.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/memory.rst 2024-04-02 08:25:04.000000000 +0000 @@ -41,10 +41,10 @@ API functions listed in this document. .. 
index:: - single: malloc() - single: calloc() - single: realloc() - single: free() + single: malloc (C function) + single: calloc (C function) + single: realloc (C function) + single: free (C function) To avoid memory corruption, extension writers should never try to operate on Python objects with the functions exported by the C library: :c:func:`malloc`, diff -Nru python3.11-3.11.8/Doc/c-api/structures.rst python3.11-3.11.9/Doc/c-api/structures.rst --- python3.11-3.11.8/Doc/c-api/structures.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/structures.rst 2024-04-02 08:25:04.000000000 +0000 @@ -570,13 +570,13 @@ +-------------+------------------+-----------------------------------+ | doc | const char \* | optional docstring | +-------------+------------------+-----------------------------------+ - | closure | void \* | optional function pointer, | + | closure | void \* | optional user data pointer, | | | | providing additional data for | | | | getter and setter | +-------------+------------------+-----------------------------------+ The ``get`` function takes one :c:expr:`PyObject*` parameter (the - instance) and a function pointer (the associated ``closure``):: + instance) and a user data pointer (the associated ``closure``):: typedef PyObject *(*getter)(PyObject *, void *); @@ -584,7 +584,7 @@ on failure. ``set`` functions take two :c:expr:`PyObject*` parameters (the instance and - the value to be set) and a function pointer (the associated ``closure``):: + the value to be set) and a user data pointer (the associated ``closure``):: typedef int (*setter)(PyObject *, PyObject *, void *); diff -Nru python3.11-3.11.8/Doc/c-api/sys.rst python3.11-3.11.9/Doc/c-api/sys.rst --- python3.11-3.11.8/Doc/c-api/sys.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/sys.rst 2024-04-02 08:25:04.000000000 +0000 @@ -5,6 +5,7 @@ Operating System Utilities ========================== + .. c:function:: PyObject* PyOS_FSPath(PyObject *path) Return the file system representation for *path*. If the object is a @@ -95,27 +96,30 @@ .. c:function:: int PyOS_CheckStack() + .. index:: single: USE_STACKCHECK (C macro) + Return true when the interpreter runs out of stack space. This is a reliable - check, but is only available when :c:macro:`USE_STACKCHECK` is defined (currently + check, but is only available when :c:macro:`!USE_STACKCHECK` is defined (currently on certain versions of Windows using the Microsoft Visual C++ compiler). - :c:macro:`USE_STACKCHECK` will be defined automatically; you should never + :c:macro:`!USE_STACKCHECK` will be defined automatically; you should never change the definition in your own code. +.. c:type:: void (*PyOS_sighandler_t)(int) + + .. c:function:: PyOS_sighandler_t PyOS_getsig(int i) Return the current signal handler for signal *i*. This is a thin wrapper around either :c:func:`!sigaction` or :c:func:`!signal`. Do not call those functions - directly! :c:type:`PyOS_sighandler_t` is a typedef alias for :c:expr:`void - (\*)(int)`. + directly! .. c:function:: PyOS_sighandler_t PyOS_setsig(int i, PyOS_sighandler_t h) Set the signal handler for signal *i* to be *h*; return the old signal handler. This is a thin wrapper around either :c:func:`!sigaction` or :c:func:`!signal`. Do - not call those functions directly! :c:type:`PyOS_sighandler_t` is a typedef - alias for :c:expr:`void (\*)(int)`. + not call those functions directly! .. 
c:function:: wchar_t* Py_DecodeLocale(const char* arg, size_t *size) @@ -378,10 +382,8 @@ silently abort the operation by raising an error subclassed from :class:`Exception` (other errors will not be silenced). - The hook function is of type :c:expr:`int (*)(const char *event, PyObject - *args, void *userData)`, where *args* is guaranteed to be a - :c:type:`PyTupleObject`. The hook function is always called with the GIL - held by the Python interpreter that raised the event. + The hook function is always called with the GIL held by the Python + interpreter that raised the event. See :pep:`578` for a detailed description of auditing. Functions in the runtime and standard library that raise events are listed in the @@ -390,12 +392,20 @@ .. audit-event:: sys.addaudithook "" c.PySys_AddAuditHook - If the interpreter is initialized, this function raises a auditing event + If the interpreter is initialized, this function raises an auditing event ``sys.addaudithook`` with no arguments. If any existing hooks raise an exception derived from :class:`Exception`, the new hook will not be added and the exception is cleared. As a result, callers cannot assume that their hook has been added unless they control all existing hooks. + .. c:namespace:: NULL + .. c:type:: int (*Py_AuditHookFunction) (const char *event, PyObject *args, void *userData) + + The type of the hook function. + *event* is the C string event argument passed to :c:func:`PySys_Audit`. + *args* is guaranteed to be a :c:type:`PyTupleObject`. + *userData* is the argument passed to PySys_AddAuditHook(). + .. versionadded:: 3.8 @@ -407,7 +417,7 @@ .. c:function:: void Py_FatalError(const char *message) - .. index:: single: abort() + .. index:: single: abort (C function) Print a fatal error message and kill the process. No cleanup is performed. This function should only be invoked when a condition is detected that would @@ -427,8 +437,8 @@ .. c:function:: void Py_Exit(int status) .. index:: - single: Py_FinalizeEx() - single: exit() + single: Py_FinalizeEx (C function) + single: exit (C function) Exit the current process. This calls :c:func:`Py_FinalizeEx` and then calls the standard C library function ``exit(status)``. If :c:func:`Py_FinalizeEx` @@ -441,7 +451,7 @@ .. c:function:: int Py_AtExit(void (*func) ()) .. index:: - single: Py_FinalizeEx() + single: Py_FinalizeEx (C function) single: cleanup functions Register a cleanup function to be called by :c:func:`Py_FinalizeEx`. The cleanup diff -Nru python3.11-3.11.8/Doc/c-api/unicode.rst python3.11-3.11.9/Doc/c-api/unicode.rst --- python3.11-3.11.8/Doc/c-api/unicode.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/unicode.rst 2024-04-02 08:25:04.000000000 +0000 @@ -950,7 +950,12 @@ Copy the Unicode object contents into the :c:type:`wchar_t` buffer *wstr*. At most *size* :c:type:`wchar_t` characters are copied (excluding a possibly trailing null termination character). Return the number of :c:type:`wchar_t` characters - copied or ``-1`` in case of an error. Note that the resulting :c:expr:`wchar_t*` + copied or ``-1`` in case of an error. + + When *wstr* is ``NULL``, instead return the *size* that would be required + to store all of *unicode* including a terminating null. + + Note that the resulting :c:expr:`wchar_t*` string may or may not be null-terminated. It is the responsibility of the caller to make sure that the :c:expr:`wchar_t*` string is null-terminated in case this is required by the application. 
Also, note that the :c:expr:`wchar_t*` string diff -Nru python3.11-3.11.8/Doc/c-api/utilities.rst python3.11-3.11.9/Doc/c-api/utilities.rst --- python3.11-3.11.8/Doc/c-api/utilities.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/utilities.rst 2024-04-02 08:25:04.000000000 +0000 @@ -17,5 +17,6 @@ marshal.rst arg.rst conversion.rst + hash.rst reflection.rst codec.rst diff -Nru python3.11-3.11.8/Doc/c-api/veryhigh.rst python3.11-3.11.9/Doc/c-api/veryhigh.rst --- python3.11-3.11.8/Doc/c-api/veryhigh.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/c-api/veryhigh.rst 2024-04-02 08:25:04.000000000 +0000 @@ -314,7 +314,7 @@ .. c:var:: int Py_eval_input - .. index:: single: Py_CompileString() + .. index:: single: Py_CompileString (C function) The start symbol from the Python grammar for isolated expressions; for use with :c:func:`Py_CompileString`. @@ -322,7 +322,7 @@ .. c:var:: int Py_file_input - .. index:: single: Py_CompileString() + .. index:: single: Py_CompileString (C function) The start symbol from the Python grammar for sequences of statements as read from a file or other source; for use with :c:func:`Py_CompileString`. This is @@ -331,7 +331,7 @@ .. c:var:: int Py_single_input - .. index:: single: Py_CompileString() + .. index:: single: Py_CompileString (C function) The start symbol from the Python grammar for a single statement; for use with :c:func:`Py_CompileString`. This is the symbol used for the interactive diff -Nru python3.11-3.11.8/Doc/conf.py python3.11-3.11.9/Doc/conf.py --- python3.11-3.11.8/Doc/conf.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/conf.py 2024-04-02 08:25:04.000000000 +0000 @@ -53,6 +53,10 @@ import patchlevel version, release = patchlevel.get_version_info() +rst_epilog = f""" +.. |python_version_literal| replace:: ``Python {version}`` +""" + # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: today = '' @@ -84,10 +88,13 @@ ('c:func', 'dlopen'), ('c:func', 'exec'), ('c:func', 'fcntl'), + ('c:func', 'flock'), ('c:func', 'fork'), ('c:func', 'free'), ('c:func', 'gettimeofday'), ('c:func', 'gmtime'), + ('c:func', 'grantpt'), + ('c:func', 'ioctl'), ('c:func', 'localeconv'), ('c:func', 'localtime'), ('c:func', 'main'), @@ -247,6 +254,7 @@ ('py:attr', '__annotations__'), ('py:meth', '__missing__'), ('py:attr', '__wrapped__'), + ('py:attr', 'decimal.Context.clamp'), ('py:meth', 'index'), # list.index, tuple.index, etc. ] diff -Nru python3.11-3.11.8/Doc/extending/extending.rst python3.11-3.11.9/Doc/extending/extending.rst --- python3.11-3.11.8/Doc/extending/extending.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/extending/extending.rst 2024-04-02 08:25:04.000000000 +0000 @@ -536,7 +536,7 @@ (but note that *temp* will not be ``NULL`` in this context). More info on them in section :ref:`refcounts`. -.. index:: single: PyObject_CallObject() +.. index:: single: PyObject_CallObject (C function) Later, when it is time to call the function, you call the C function :c:func:`PyObject_CallObject`. This function has two arguments, both pointers to @@ -627,7 +627,7 @@ Extracting Parameters in Extension Functions ============================================ -.. index:: single: PyArg_ParseTuple() +.. index:: single: PyArg_ParseTuple (C function) The :c:func:`PyArg_ParseTuple` function is declared as follows:: @@ -719,7 +719,7 @@ Keyword Parameters for Extension Functions ========================================== -.. 
index:: single: PyArg_ParseTupleAndKeywords() +.. index:: single: PyArg_ParseTupleAndKeywords (C function) The :c:func:`PyArg_ParseTupleAndKeywords` function is declared as follows:: diff -Nru python3.11-3.11.8/Doc/extending/newtypes.rst python3.11-3.11.9/Doc/extending/newtypes.rst --- python3.11-3.11.8/Doc/extending/newtypes.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/extending/newtypes.rst 2024-04-02 08:25:04.000000000 +0000 @@ -89,8 +89,8 @@ } .. index:: - single: PyErr_Fetch() - single: PyErr_Restore() + single: PyErr_Fetch (C function) + single: PyErr_Restore (C function) One important requirement of the deallocator function is that it leaves any pending exceptions alone. This is important since deallocators are frequently diff -Nru python3.11-3.11.8/Doc/faq/design.rst python3.11-3.11.9/Doc/faq/design.rst --- python3.11-3.11.8/Doc/faq/design.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/faq/design.rst 2024-04-02 08:25:04.000000000 +0000 @@ -258,9 +258,11 @@ Why isn't there a switch or case statement in Python? ----------------------------------------------------- -You can do this easily enough with a sequence of ``if... elif... elif... else``. -For literal values, or constants within a namespace, you can also use a -``match ... case`` statement. +In general, structured switch statements execute one block of code +when an expression has a particular value or set of values. +Since Python 3.10 one can easily match literal values, or constants +within a namespace, with a ``match ... case`` statement. +An older alternative is a sequence of ``if... elif... elif... else``. For cases where you need to choose from a very large number of possibilities, you can create a dictionary mapping case values to functions to call. For @@ -289,6 +291,9 @@ this example. Without such a prefix, if values are coming from an untrusted source, an attacker would be able to call any method on your object. +Imitating switch with fallthrough, as with C's switch-case-default, +is possible, much harder, and less needed. + Can't you emulate threads in the interpreter instead of relying on an OS-specific thread implementation? -------------------------------------------------------------------------------------------------------- diff -Nru python3.11-3.11.8/Doc/faq/extending.rst python3.11-3.11.9/Doc/faq/extending.rst --- python3.11-3.11.8/Doc/faq/extending.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/faq/extending.rst 2024-04-02 08:25:04.000000000 +0000 @@ -50,7 +50,7 @@ If you need to interface to some C or C++ library for which no Python extension currently exists, you can try wrapping the library's data types and functions with a tool such as `SWIG `_. `SIP -`__, `CXX +`__, `CXX `_ `Boost `_, or `Weave `_ are also diff -Nru python3.11-3.11.8/Doc/faq/general.rst python3.11-3.11.9/Doc/faq/general.rst --- python3.11-3.11.8/Doc/faq/general.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/faq/general.rst 2024-04-02 08:25:04.000000000 +0000 @@ -133,8 +133,6 @@ changes. * *C* is the micro version number -- it is incremented for each bugfix release. -See :pep:`6` for more information about bugfix releases. - Not all releases are bugfix releases. In the run-up to a new feature release, a series of development releases are made, denoted as alpha, beta, or release candidate. 
Alphas are early releases in which interfaces aren't yet finalized; @@ -157,7 +155,11 @@ practice, after a final minor release is made, the version is incremented to the next minor version, which becomes the "a0" version, e.g. "2.4a0". -See also the documentation for :data:`sys.version`, :data:`sys.hexversion`, and +See the `Developer's Guide +`__ +for more information about the development cycle, and +:pep:`387` to learn more about Python's backward compatibility policy. See also +the documentation for :data:`sys.version`, :data:`sys.hexversion`, and :data:`sys.version_info`. diff -Nru python3.11-3.11.8/Doc/glossary.rst python3.11-3.11.9/Doc/glossary.rst --- python3.11-3.11.8/Doc/glossary.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/glossary.rst 2024-04-02 08:25:04.000000000 +0000 @@ -726,18 +726,6 @@ thread removes *key* from *mapping* after the test, but before the lookup. This issue can be solved with locks or by using the EAFP approach. - locale encoding - On Unix, it is the encoding of the LC_CTYPE locale. It can be set with - :func:`locale.setlocale(locale.LC_CTYPE, new_locale) `. - - On Windows, it is the ANSI code page (ex: ``"cp1252"``). - - On Android and VxWorks, Python uses ``"utf-8"`` as the locale encoding. - - ``locale.getencoding()`` can be used to get the locale encoding. - - See also the :term:`filesystem encoding and error handler`. - list A built-in Python :term:`sequence`. Despite its name it is more akin to an array in other languages than to a linked list since access to @@ -757,6 +745,18 @@ :term:`finder`. See :pep:`302` for details and :class:`importlib.abc.Loader` for an :term:`abstract base class`. + locale encoding + On Unix, it is the encoding of the LC_CTYPE locale. It can be set with + :func:`locale.setlocale(locale.LC_CTYPE, new_locale) `. + + On Windows, it is the ANSI code page (ex: ``"cp1252"``). + + On Android and VxWorks, Python uses ``"utf-8"`` as the locale encoding. + + :func:`locale.getencoding` can be used to get the locale encoding. + + See also the :term:`filesystem encoding and error handler`. + magic method .. index:: pair: magic; method @@ -840,10 +840,11 @@ Some named tuples are built-in types (such as the above examples). Alternatively, a named tuple can be created from a regular class definition that inherits from :class:`tuple` and that defines named - fields. Such a class can be written by hand or it can be created with - the factory function :func:`collections.namedtuple`. The latter - technique also adds some extra methods that may not be found in - hand-written or built-in named tuples. + fields. Such a class can be written by hand, or it can be created by + inheriting :class:`typing.NamedTuple`, or with the factory function + :func:`collections.namedtuple`. The latter techniques also add some + extra methods that may not be found in hand-written or built-in named + tuples. namespace The place where a variable is stored. Namespaces are implemented as diff -Nru python3.11-3.11.8/Doc/howto/gdb_helpers.rst python3.11-3.11.9/Doc/howto/gdb_helpers.rst --- python3.11-3.11.8/Doc/howto/gdb_helpers.rst 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Doc/howto/gdb_helpers.rst 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,406 @@ +.. _gdb: + +========================================================= +Debugging C API extensions and CPython Internals with GDB +========================================================= + +.. 
highlight:: none + +This document explains how the Python GDB extension, ``python-gdb.py``, can +be used with the GDB debugger to debug CPython extensions and the +CPython interpreter itself. + +When debugging low-level problems such as crashes or deadlocks, a low-level +debugger, such as GDB, is useful to diagnose and correct the issue. +By default, GDB (or any of its front-ends) doesn't support high-level +information specific to the CPython interpreter. + +The ``python-gdb.py`` extension adds CPython interpreter information to GDB. +The extension helps introspect the stack of currently executing Python functions. +Given a Python object represented by a :c:expr:`PyObject *` pointer, +the extension surfaces the type and value of the object. + +Developers who are working on CPython extensions or tinkering with parts +of CPython that are written in C can use this document to learn how to use the +``python-gdb.py`` extension with GDB. + +.. note:: + + This document assumes that you are familiar with the basics of GDB and the + CPython C API. It consolidates guidance from the + `devguide `_ and the + `Python wiki `_. + + +Prerequisites +============= + +You need to have: + +- GDB 7 or later. (For earlier versions of GDB, see ``Misc/gdbinit`` in the + CPython sources. Note that this file will be removed in Python 3.12.) +- GDB-compatible debugging information for Python and any extension you are + debugging. +- The ``python-gdb.py`` extension. + +The extension is built with Python, but might be distributed separately or +not at all. Below, we include tips for a few common systems as examples. +Note that even if the instructions match your system, they might be outdated. + + +Setup with Python built from source +----------------------------------- + +When you build CPython from source, debugging information should be available, +and the build should add a ``python-gdb.py`` file to the root directory of +your repository. + +To activate support, you must add the directory containing ``python-gdb.py`` +to GDB's "auto-load-safe-path". +If you haven't done this, recent versions of GDB will print out a warning +with instructions on how to do this. + +.. note:: + + If you do not see instructions for your version of GDB, put this in your + configuration file (``~/.gdbinit`` or ``~/.config/gdb/gdbinit``):: + + add-auto-load-safe-path /path/to/cpython + + You can also add multiple paths, separated by ``:``. + + +Setup for Python from a Linux distro +------------------------------------ + +Most Linux systems provide debug information for the system Python +in a package called ``python-debuginfo``, ``python-dbg`` or similar. +For example: + +- Fedora: + + .. code-block:: shell + + sudo dnf install gdb + sudo dnf debuginfo-install python3 + +- Ubuntu: + + .. code-block:: shell + + sudo apt install gdb python3-dbg + +On several recent Linux systems, GDB can download debugging symbols +automatically using *debuginfod*. +However, this will not install the ``python-gdb.py`` extension; +you generally do need to install the debug info package separately. + + +Using the Debug build and Development mode +========================================== + +For easier debugging, you might want to: + +- Use a :ref:`debug build ` of Python. (When building from source, + use ``configure --with-pydebug``. On Linux distros, install and run a package + like ``python-debug`` or ``python-dbg``, if available.) +- Use the runtime :ref:`development mode ` (``-X dev``). + +Both enable extra assertions and disable some optimizations. 
+Sometimes this hides the bug you are trying to find, but in most cases they +make the process easier. + + +Using the ``python-gdb`` extension +================================== + +When the extension is loaded, it provides two main features: +pretty printers for Python values, and additional commands. + +Pretty-printers +--------------- + +This is what a GDB backtrace looks like (truncated) when this extension is +enabled:: + + #0 0x000000000041a6b1 in PyObject_Malloc (nbytes=Cannot access memory at address 0x7fffff7fefe8 + ) at Objects/obmalloc.c:748 + #1 0x000000000041b7c0 in _PyObject_DebugMallocApi (id=111 'o', nbytes=24) at Objects/obmalloc.c:1445 + #2 0x000000000041b717 in _PyObject_DebugMalloc (nbytes=24) at Objects/obmalloc.c:1412 + #3 0x000000000044060a in _PyUnicode_New (length=11) at Objects/unicodeobject.c:346 + #4 0x00000000004466aa in PyUnicodeUCS2_DecodeUTF8Stateful (s=0x5c2b8d "__lltrace__", size=11, errors=0x0, consumed= + 0x0) at Objects/unicodeobject.c:2531 + #5 0x0000000000446647 in PyUnicodeUCS2_DecodeUTF8 (s=0x5c2b8d "__lltrace__", size=11, errors=0x0) + at Objects/unicodeobject.c:2495 + #6 0x0000000000440d1b in PyUnicodeUCS2_FromStringAndSize (u=0x5c2b8d "__lltrace__", size=11) + at Objects/unicodeobject.c:551 + #7 0x0000000000440d94 in PyUnicodeUCS2_FromString (u=0x5c2b8d "__lltrace__") at Objects/unicodeobject.c:569 + #8 0x0000000000584abd in PyDict_GetItemString (v= + {'Yuck': , '__builtins__': , '__file__': 'Lib/test/crashers/nasty_eq_vs_dict.py', '__package__': None, 'y': , 'dict': {0: 0, 1: 1, 2: 2, 3: 3}, '__cached__': None, '__name__': '__main__', 'z': , '__doc__': None}, key= + 0x5c2b8d "__lltrace__") at Objects/dictobject.c:2171 + +Notice how the dictionary argument to ``PyDict_GetItemString`` is displayed +as its ``repr()``, rather than an opaque ``PyObject *`` pointer. + +The extension works by supplying a custom printing routine for values of type +``PyObject *``. If you need to access lower-level details of an object, then +cast the value to a pointer of the appropriate type. For example:: + + (gdb) p globals + $1 = {'__builtins__': , '__name__': + '__main__', 'ctypes': , '__doc__': None, + '__package__': None} + + (gdb) p *(PyDictObject*)globals + $2 = {ob_refcnt = 3, ob_type = 0x3dbdf85820, ma_fill = 5, ma_used = 5, + ma_mask = 7, ma_table = 0x63d0f8, ma_lookup = 0x3dbdc7ea70 + , ma_smalltable = {{me_hash = 7065186196740147912, + me_key = '__builtins__', me_value = }, + {me_hash = -368181376027291943, me_key = '__name__', + me_value ='__main__'}, {me_hash = 0, me_key = 0x0, me_value = 0x0}, + {me_hash = 0, me_key = 0x0, me_value = 0x0}, + {me_hash = -9177857982131165996, me_key = 'ctypes', + me_value = }, + {me_hash = -8518757509529533123, me_key = '__doc__', me_value = None}, + {me_hash = 0, me_key = 0x0, me_value = 0x0}, { + me_hash = 6614918939584953775, me_key = '__package__', me_value = None}}} + +Note that the pretty-printers do not actually call ``repr()``. +For basic types, they try to match its result closely. + +An area that can be confusing is that the custom printer for some types look a +lot like GDB's built-in printer for standard types. 
For example, the +pretty-printer for a Python ``int`` (:c:expr:`PyLongObject *`) +gives a representation that is not distinguishable from one of a +regular machine-level integer:: + + (gdb) p some_machine_integer + $3 = 42 + + (gdb) p some_python_integer + $4 = 42 + +The internal structure can be revealed with a cast to :c:expr:`PyLongObject *`: + + (gdb) p *(PyLongObject*)some_python_integer + $5 = {ob_base = {ob_base = {ob_refcnt = 8, ob_type = 0x3dad39f5e0}, ob_size = 1}, + ob_digit = {42}} + +A similar confusion can arise with the ``str`` type, where the output looks a +lot like gdb's built-in printer for ``char *``:: + + (gdb) p ptr_to_python_str + $6 = '__builtins__' + +The pretty-printer for ``str`` instances defaults to using single-quotes (as +does Python's ``repr`` for strings) whereas the standard printer for ``char *`` +values uses double-quotes and contains a hexadecimal address:: + + (gdb) p ptr_to_char_star + $7 = 0x6d72c0 "hello world" + +Again, the implementation details can be revealed with a cast to +:c:expr:`PyUnicodeObject *`:: + + (gdb) p *(PyUnicodeObject*)$6 + $8 = {ob_base = {ob_refcnt = 33, ob_type = 0x3dad3a95a0}, length = 12, + str = 0x7ffff2128500, hash = 7065186196740147912, state = 1, defenc = 0x0} + +``py-list`` +----------- + + The extension adds a ``py-list`` command, which + lists the Python source code (if any) for the current frame in the selected + thread. The current line is marked with a ">":: + + (gdb) py-list + 901 if options.profile: + 902 options.profile = False + 903 profile_me() + 904 return + 905 + >906 u = UI() + 907 if not u.quit: + 908 try: + 909 gtk.main() + 910 except KeyboardInterrupt: + 911 # properly quit on a keyboard interrupt... + + Use ``py-list START`` to list at a different line number within the Python + source, and ``py-list START,END`` to list a specific range of lines within + the Python source. + +``py-up`` and ``py-down`` +------------------------- + + The ``py-up`` and ``py-down`` commands are analogous to GDB's regular ``up`` + and ``down`` commands, but try to move at the level of CPython frames, rather + than C frames. + + GDB is not always able to read the relevant frame information, depending on + the optimization level with which CPython was compiled. Internally, the + commands look for C frames that are executing the default frame evaluation + function (that is, the core bytecode interpreter loop within CPython) and + look up the value of the related ``PyFrameObject *``. + + They emit the frame number (at the C level) within the thread. + + For example:: + + (gdb) py-up + #37 Frame 0x9420b04, for file /usr/lib/python2.6/site-packages/ + gnome_sudoku/main.py, line 906, in start_game () + u = UI() + (gdb) py-up + #40 Frame 0x948e82c, for file /usr/lib/python2.6/site-packages/ + gnome_sudoku/gnome_sudoku.py, line 22, in start_game(main=) + main.start_game() + (gdb) py-up + Unable to find an older python frame + + so we're at the top of the Python stack. + + The frame numbers correspond to those displayed by GDB's standard + ``backtrace`` command. + The command skips C frames which are not executing Python code. 
+ + Going back down:: + + (gdb) py-down + #37 Frame 0x9420b04, for file /usr/lib/python2.6/site-packages/gnome_sudoku/main.py, line 906, in start_game () + u = UI() + (gdb) py-down + #34 (unable to read python frame information) + (gdb) py-down + #23 (unable to read python frame information) + (gdb) py-down + #19 (unable to read python frame information) + (gdb) py-down + #14 Frame 0x99262ac, for file /usr/lib/python2.6/site-packages/gnome_sudoku/game_selector.py, line 201, in run_swallowed_dialog (self=, puzzle=None, saved_games=[{'gsd.auto_fills': 0, 'tracking': {}, 'trackers': {}, 'notes': [], 'saved_at': 1270084485, 'game': '7 8 0 0 0 0 0 5 6 0 0 9 0 8 0 1 0 0 0 4 6 0 0 0 0 7 0 6 5 0 0 0 4 7 9 2 0 0 0 9 0 1 0 0 0 3 9 7 6 0 0 0 1 8 0 6 0 0 0 0 2 8 0 0 0 5 0 4 0 6 0 0 2 1 0 0 0 0 0 4 5\n7 8 0 0 0 0 0 5 6 0 0 9 0 8 0 1 0 0 0 4 6 0 0 0 0 7 0 6 5 1 8 3 4 7 9 2 0 0 0 9 0 1 0 0 0 3 9 7 6 0 0 0 1 8 0 6 0 0 0 0 2 8 0 0 0 5 0 4 0 6 0 0 2 1 0 0 0 0 0 4 5', 'gsd.impossible_hints': 0, 'timer.__absolute_start_time__': , 'gsd.hints': 0, 'timer.active_time': , 'timer.total_time': }], dialog=, saved_game_model=, sudoku_maker=, main_page=0) at remote 0x98fa6e4>, d=) + gtk.main() + (gdb) py-down + #8 (unable to read python frame information) + (gdb) py-down + Unable to find a newer python frame + + and we're at the bottom of the Python stack. + + +``py-bt`` +--------- + + The ``py-bt`` command attempts to display a Python-level backtrace of the + current thread. + + For example:: + + (gdb) py-bt + #8 (unable to read python frame information) + #11 Frame 0x9aead74, for file /usr/lib/python2.6/site-packages/gnome_sudoku/dialog_swallower.py, line 48, in run_dialog (self=, main_page=0) at remote 0x98fa6e4>, d=) + gtk.main() + #14 Frame 0x99262ac, for file /usr/lib/python2.6/site-packages/gnome_sudoku/game_selector.py, line 201, in run_swallowed_dialog (self=, puzzle=None, saved_games=[{'gsd.auto_fills': 0, 'tracking': {}, 'trackers': {}, 'notes': [], 'saved_at': 1270084485, 'game': '7 8 0 0 0 0 0 5 6 0 0 9 0 8 0 1 0 0 0 4 6 0 0 0 0 7 0 6 5 0 0 0 4 7 9 2 0 0 0 9 0 1 0 0 0 3 9 7 6 0 0 0 1 8 0 6 0 0 0 0 2 8 0 0 0 5 0 4 0 6 0 0 2 1 0 0 0 0 0 4 5\n7 8 0 0 0 0 0 5 6 0 0 9 0 8 0 1 0 0 0 4 6 0 0 0 0 7 0 6 5 1 8 3 4 7 9 2 0 0 0 9 0 1 0 0 0 3 9 7 6 0 0 0 1 8 0 6 0 0 0 0 2 8 0 0 0 5 0 4 0 6 0 0 2 1 0 0 0 0 0 4 5', 'gsd.impossible_hints': 0, 'timer.__absolute_start_time__': , 'gsd.hints': 0, 'timer.active_time': , 'timer.total_time': }], dialog=, saved_game_model=, sudoku_maker=) + main.start_game() + + The frame numbers correspond to those displayed by GDB's standard + ``backtrace`` command. + +``py-print`` +------------ + + The ``py-print`` command looks up a Python name and tries to print it. + It looks in locals within the current thread, then globals, then finally + builtins:: + + (gdb) py-print self + local 'self' = , + main_page=0) at remote 0x98fa6e4> + (gdb) py-print __name__ + global '__name__' = 'gnome_sudoku.dialog_swallower' + (gdb) py-print len + builtin 'len' = + (gdb) py-print scarlet_pimpernel + 'scarlet_pimpernel' not found + +``py-locals`` +------------- + + The ``py-locals`` command looks up all Python locals within the current + Python frame in the selected thread, and prints their representations:: + + (gdb) py-locals + self = , + main_page=0) at remote 0x98fa6e4> + d = + + +Use with GDB commands +===================== + +The extension commands complement GDB's built-in commands. 
+For example, you can use a frame numbers shown by ``py-bt`` with the ``frame`` +command to go a specific frame within the selected thread, like this:: + + (gdb) py-bt + (output snipped) + #68 Frame 0xaa4560, for file Lib/test/regrtest.py, line 1548, in () + main() + (gdb) frame 68 + #68 0x00000000004cd1e6 in PyEval_EvalFrameEx (f=Frame 0xaa4560, for file Lib/test/regrtest.py, line 1548, in (), throwflag=0) at Python/ceval.c:2665 + 2665 x = call_function(&sp, oparg); + (gdb) py-list + 1543 # Run the tests in a context manager that temporary changes the CWD to a + 1544 # temporary and writable directory. If it's not possible to create or + 1545 # change the CWD, the original CWD will be used. The original CWD is + 1546 # available from test_support.SAVEDCWD. + 1547 with test_support.temp_cwd(TESTCWD, quiet=True): + >1548 main() + +The ``info threads`` command will give you a list of the threads within the +process, and you can use the ``thread`` command to select a different one:: + + (gdb) info threads + 105 Thread 0x7fffefa18710 (LWP 10260) sem_wait () at ../nptl/sysdeps/unix/sysv/linux/x86_64/sem_wait.S:86 + 104 Thread 0x7fffdf5fe710 (LWP 10259) sem_wait () at ../nptl/sysdeps/unix/sysv/linux/x86_64/sem_wait.S:86 + * 1 Thread 0x7ffff7fe2700 (LWP 10145) 0x00000038e46d73e3 in select () at ../sysdeps/unix/syscall-template.S:82 + +You can use ``thread apply all COMMAND`` or (``t a a COMMAND`` for short) to run +a command on all threads. With ``py-bt``, this lets you see what every +thread is doing at the Python level:: + + (gdb) t a a py-bt + + Thread 105 (Thread 0x7fffefa18710 (LWP 10260)): + #5 Frame 0x7fffd00019d0, for file /home/david/coding/python-svn/Lib/threading.py, line 155, in _acquire_restore (self=<_RLock(_Verbose__verbose=False, _RLock__owner=140737354016512, _RLock__block=, _RLock__count=1) at remote 0xd7ff40>, count_owner=(1, 140737213728528), count=1, owner=140737213728528) + self.__block.acquire() + #8 Frame 0x7fffac001640, for file /home/david/coding/python-svn/Lib/threading.py, line 269, in wait (self=<_Condition(_Condition__lock=<_RLock(_Verbose__verbose=False, _RLock__owner=140737354016512, _RLock__block=, _RLock__count=1) at remote 0xd7ff40>, acquire=, _is_owned=, _release_save=, release=, _acquire_restore=, _Verbose__verbose=False, _Condition__waiters=[]) at remote 0xd7fd10>, timeout=None, waiter=, saved_state=(1, 140737213728528)) + self._acquire_restore(saved_state) + #12 Frame 0x7fffb8001a10, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 348, in f () + cond.wait() + #16 Frame 0x7fffb8001c40, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 37, in task (tid=140737213728528) + f() + + Thread 104 (Thread 0x7fffdf5fe710 (LWP 10259)): + #5 Frame 0x7fffe4001580, for file /home/david/coding/python-svn/Lib/threading.py, line 155, in _acquire_restore (self=<_RLock(_Verbose__verbose=False, _RLock__owner=140737354016512, _RLock__block=, _RLock__count=1) at remote 0xd7ff40>, count_owner=(1, 140736940992272), count=1, owner=140736940992272) + self.__block.acquire() + #8 Frame 0x7fffc8002090, for file /home/david/coding/python-svn/Lib/threading.py, line 269, in wait (self=<_Condition(_Condition__lock=<_RLock(_Verbose__verbose=False, _RLock__owner=140737354016512, _RLock__block=, _RLock__count=1) at remote 0xd7ff40>, acquire=, _is_owned=, _release_save=, release=, _acquire_restore=, _Verbose__verbose=False, _Condition__waiters=[]) at remote 0xd7fd10>, timeout=None, waiter=, saved_state=(1, 140736940992272)) + 
self._acquire_restore(saved_state) + #12 Frame 0x7fffac001c90, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 348, in f () + cond.wait() + #16 Frame 0x7fffac0011c0, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 37, in task (tid=140736940992272) + f() + + Thread 1 (Thread 0x7ffff7fe2700 (LWP 10145)): + #5 Frame 0xcb5380, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 16, in _wait () + time.sleep(0.01) + #8 Frame 0x7fffd00024a0, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 378, in _check_notify (self=, skipped=[], _mirrorOutput=False, testsRun=39, buffer=False, _original_stderr=, _stdout_buffer=, _stderr_buffer=, _moduleSetUpFailed=False, expectedFailures=[], errors=[], _previousTestClass=, unexpectedSuccesses=[], failures=[], shouldStop=False, failfast=False) at remote 0xc185a0>, _threads=(0,), _cleanups=[], _type_equality_funcs={: , : , : , : , `_, which is a Python binding to +`NNG `_, billed as a spiritual successor to ZeroMQ. +The following snippets illustrate -- you can test them in an environment which has +``pynng`` installed. Juat for variety, we present the listener first. - Module :mod:`logging` - API reference for the logging module. - Module :mod:`logging.config` - Configuration API for the logging module. +Subclass ``QueueListener`` +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: python + + import json + import logging + import logging.handlers - Module :mod:`logging.handlers` - Useful handlers included with the logging module. + import pynng - :ref:`A basic logging tutorial ` + DEFAULT_ADDR = "tcp://localhost:13232" - :ref:`A more advanced logging tutorial ` + interrupted = False + class NNGSocketListener(logging.handlers.QueueListener): + + def __init__(self, uri, /, *handlers, **kwargs): + # Have a timeout for interruptability, and open a + # subscriber socket + socket = pynng.Sub0(listen=uri, recv_timeout=500) + # The b'' subscription matches all topics + topics = kwargs.pop('topics', None) or b'' + socket.subscribe(topics) + # We treat the socket as a queue + super().__init__(socket, *handlers, **kwargs) + + def dequeue(self, block): + data = None + # Keep looping while not interrupted and no data received over the + # socket + while not interrupted: + try: + data = self.queue.recv(block=block) + break + except pynng.Timeout: + pass + except pynng.Closed: # sometimes hit when you hit Ctrl-C + break + if data is None: + return None + # Get the logging event sent from a publisher + event = json.loads(data.decode('utf-8')) + return logging.makeLogRecord(event) + + def enqueue_sentinel(self): + # Not used in this implementation, as the socket isn't really a + # queue + pass + + logging.getLogger('pynng').propagate = False + listener = NNGSocketListener(DEFAULT_ADDR, logging.StreamHandler(), topics=b'') + listener.start() + print('Press Ctrl-C to stop.') + try: + while True: + pass + except KeyboardInterrupt: + interrupted = True + finally: + listener.stop() + + +Subclass ``QueueHandler`` +^^^^^^^^^^^^^^^^^^^^^^^^^ .. currentmodule:: logging +.. 
code-block:: python + + import json + import logging + import logging.handlers + import time + import random + + import pynng + + DEFAULT_ADDR = "tcp://localhost:13232" + + class NNGSocketHandler(logging.handlers.QueueHandler): + + def __init__(self, uri): + socket = pynng.Pub0(dial=uri, send_timeout=500) + super().__init__(socket) + + def enqueue(self, record): + # Send the record as UTF-8 encoded JSON + d = dict(record.__dict__) + data = json.dumps(d) + self.queue.send(data.encode('utf-8')) + + def close(self): + self.queue.close() + + logging.getLogger('pynng').propagate = False + handler = NNGSocketHandler(DEFAULT_ADDR) + logging.basicConfig(level=logging.DEBUG, + handlers=[logging.StreamHandler(), handler], + format='%(levelname)-8s %(name)10s %(message)s') + levels = (logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, + logging.CRITICAL) + logger_names = ('myapp', 'myapp.lib1', 'myapp.lib2') + msgno = 1 + while True: + # Just randomly select some loggers and levels and log away + level = random.choice(levels) + logger = logging.getLogger(random.choice(logger_names)) + logger.log(level, 'Message no. %5d' % msgno) + msgno += 1 + delay = random.random() * 2 + 0.5 + time.sleep(delay) + +You can run the above two snippets in separate command shells. + + An example dictionary-based configuration ----------------------------------------- @@ -3420,9 +3530,10 @@ :mod:`threading` module, as there are circumstances where one has to use ``QThread``, which offers better integration with other ``Qt`` components. -The code should work with recent releases of either ``PySide2`` or ``PyQt5``. -You should be able to adapt the approach to earlier versions of Qt. Please -refer to the comments in the code snippet for more detailed information. +The code should work with recent releases of any of ``PySide6``, ``PyQt6``, +``PySide2`` or ``PyQt5``. You should be able to adapt the approach to earlier +versions of Qt. Please refer to the comments in the code snippet for more +detailed information. .. 
code-block:: python3 @@ -3432,16 +3543,25 @@ import sys import time - # Deal with minor differences between PySide2 and PyQt5 + # Deal with minor differences between different Qt packages try: - from PySide2 import QtCore, QtGui, QtWidgets + from PySide6 import QtCore, QtGui, QtWidgets Signal = QtCore.Signal Slot = QtCore.Slot except ImportError: - from PyQt5 import QtCore, QtGui, QtWidgets - Signal = QtCore.pyqtSignal - Slot = QtCore.pyqtSlot - + try: + from PyQt6 import QtCore, QtGui, QtWidgets + Signal = QtCore.pyqtSignal + Slot = QtCore.pyqtSlot + except ImportError: + try: + from PySide2 import QtCore, QtGui, QtWidgets + Signal = QtCore.Signal + Slot = QtCore.Slot + except ImportError: + from PyQt5 import QtCore, QtGui, QtWidgets + Signal = QtCore.pyqtSignal + Slot = QtCore.pyqtSlot logger = logging.getLogger(__name__) @@ -3513,8 +3633,14 @@ while not QtCore.QThread.currentThread().isInterruptionRequested(): delay = 0.5 + random.random() * 2 time.sleep(delay) - level = random.choice(LEVELS) - logger.log(level, 'Message after delay of %3.1f: %d', delay, i, extra=extra) + try: + if random.random() < 0.1: + raise ValueError('Exception raised: %d' % i) + else: + level = random.choice(LEVELS) + logger.log(level, 'Message after delay of %3.1f: %d', delay, i, extra=extra) + except ValueError as e: + logger.exception('Failed: %s', e, extra=extra) i += 1 # @@ -3541,7 +3667,10 @@ self.textedit = te = QtWidgets.QPlainTextEdit(self) # Set whatever the default monospace font is for the platform f = QtGui.QFont('nosuchfont') - f.setStyleHint(f.Monospace) + if hasattr(f, 'Monospace'): + f.setStyleHint(f.Monospace) + else: + f.setStyleHint(f.StyleHint.Monospace) # for Qt6 te.setFont(f) te.setReadOnly(True) PB = QtWidgets.QPushButton @@ -3628,7 +3757,11 @@ app = QtWidgets.QApplication(sys.argv) example = Window(app) example.show() - sys.exit(app.exec_()) + if hasattr(app, 'exec'): + rc = app.exec() + else: + rc = app.exec_() + sys.exit(rc) if __name__=='__main__': main() diff -Nru python3.11-3.11.8/Doc/howto/logging.rst python3.11-3.11.9/Doc/howto/logging.rst --- python3.11-3.11.8/Doc/howto/logging.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/howto/logging.rst 2024-04-02 08:25:04.000000000 +0000 @@ -22,10 +22,12 @@ When to use logging ^^^^^^^^^^^^^^^^^^^ -Logging provides a set of convenience functions for simple logging usage. These -are :func:`debug`, :func:`info`, :func:`warning`, :func:`error` and -:func:`critical`. To determine when to use logging, see the table below, which -states, for each of a set of common tasks, the best tool to use for it. +You can access logging functionality by creating a logger via ``logger = +getLogger(__name__)``, and then calling the logger's :meth:`~Logger.debug`, +:meth:`~Logger.info`, :meth:`~Logger.warning`, :meth:`~Logger.error` and +:meth:`~Logger.critical` methods. To determine when to use logging, and to see +which logger methods to use when, see the table below. It states, for each of a +set of common tasks, the best tool to use for that task. +-------------------------------------+--------------------------------------+ | Task you want to perform | The best tool for the task | @@ -34,8 +36,8 @@ | usage of a command line script or | | | program | | +-------------------------------------+--------------------------------------+ -| Report events that occur during | :func:`logging.info` (or | -| normal operation of a program (e.g. 
| :func:`logging.debug` for very       |
+| Report events that occur during     | A logger's :meth:`~Logger.info` (or  |
+| normal operation of a program (e.g. | :meth:`~Logger.debug` method for very|
 | for status monitoring or fault      | detailed output for diagnostic       |
 | investigation)                      | purposes)                            |
 +-------------------------------------+--------------------------------------+
@@ -44,22 +46,23 @@
 |                                     | the client application should be     |
 |                                     | modified to eliminate the warning    |
 |                                     |                                      |
-|                                     | :func:`logging.warning` if there is  |
-|                                     | nothing the client application can do|
-|                                     | about the situation, but the event   |
-|                                     | should still be noted                |
+|                                     | A logger's :meth:`~Logger.warning`   |
+|                                     | method if there is nothing the client|
+|                                     | application can do about the         |
+|                                     | situation, but the event should still|
+|                                     | be noted                             |
 +-------------------------------------+--------------------------------------+
 | Report an error regarding a         | Raise an exception                   |
 | particular runtime event            |                                      |
 +-------------------------------------+--------------------------------------+
-| Report suppression of an error      | :func:`logging.error`,               |
-| without raising an exception (e.g.  | :func:`logging.exception` or         |
-| error handler in a long-running     | :func:`logging.critical` as          |
+| Report suppression of an error      | A logger's :meth:`~Logger.error`,    |
+| without raising an exception (e.g.  | :meth:`~Logger.exception` or         |
+| error handler in a long-running     | :meth:`~Logger.critical` method as   |
 | server process)                     | appropriate for the specific error   |
 |                                     | and application domain               |
 +-------------------------------------+--------------------------------------+

-The logging functions are named after the level or severity of the events
+The logger methods are named after the level or severity of the events
 they are used to track. The standard levels and their applicability are
 described below (in increasing order of severity):

@@ -113,12 +116,18 @@
    WARNING:root:Watch out!

 printed out on the console. The ``INFO`` message doesn't appear because the
-default level is ``WARNING``. The printed message includes the indication of
-the level and the description of the event provided in the logging call, i.e.
-'Watch out!'. Don't worry about the 'root' part for now: it will be explained
-later. The actual output can be formatted quite flexibly if you need that;
-formatting options will also be explained later.
-
+default level is ``WARNING``. The printed message includes the indication of the
+level and the description of the event provided in the logging call, i.e.
+'Watch out!'. The actual output can be formatted quite flexibly if you need
+that; formatting options will also be explained later.
+
+Notice that in this example, we use functions directly on the ``logging``
+module, like ``logging.debug``, rather than creating a logger and calling
+functions on it. These functions operate on the root logger, but can be useful
+as they will call :func:`~logging.basicConfig` for you if it has not been called yet, like in
+this example. In larger programs you'll usually want to control the logging
+configuration explicitly however - so for that reason as well as others, it's
+better to create loggers and call their methods.

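
To make the contrast concrete, here is a minimal sketch of the two styles side
by side (assuming it is run as a standalone script, so ``__name__`` resolves to
``'__main__'``)::

    import logging

    # Module-level convenience function: it logs via the root logger and
    # calls basicConfig() with default settings if no handler is set up yet.
    logging.warning('Via the root logger')    # WARNING:root:Via the root logger

    # Preferred in larger programs: a named, per-module logger.
    logger = logging.getLogger(__name__)
    logger.warning('Via a named logger')      # WARNING:__main__:Via a named logger

The second form also records which module an event came from, which becomes
useful as soon as more than one module logs messages.
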
Logging to a file ^^^^^^^^^^^^^^^^^ @@ -128,11 +137,12 @@ interpreter, and don't just continue from the session described above:: import logging + logger = logging.getLogger(__name__) logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) - logging.debug('This message should go to the log file') - logging.info('So should this') - logging.warning('And this, too') - logging.error('And non-ASCII stuff, too, like Øresund and Malmö') + logger.debug('This message should go to the log file') + logger.info('So should this') + logger.warning('And this, too') + logger.error('And non-ASCII stuff, too, like Øresund and Malmö') .. versionchanged:: 3.9 The *encoding* argument was added. In earlier Python versions, or if not @@ -146,10 +156,10 @@ .. code-block:: none - DEBUG:root:This message should go to the log file - INFO:root:So should this - WARNING:root:And this, too - ERROR:root:And non-ASCII stuff, too, like Øresund and Malmö + DEBUG:__main__:This message should go to the log file + INFO:__main__:So should this + WARNING:__main__:And this, too + ERROR:__main__:And non-ASCII stuff, too, like Øresund and Malmö This example also shows how you can set the logging level which acts as the threshold for tracking. In this case, because we set the threshold to @@ -178,11 +188,9 @@ raise ValueError('Invalid log level: %s' % loglevel) logging.basicConfig(level=numeric_level, ...) -The call to :func:`basicConfig` should come *before* any calls to -:func:`debug`, :func:`info`, etc. Otherwise, those functions will call -:func:`basicConfig` for you with the default options. As it's intended as a -one-off simple configuration facility, only the first call will actually do -anything: subsequent calls are effectively no-ops. +The call to :func:`basicConfig` should come *before* any calls to a logger's +methods such as :meth:`~Logger.debug`, :meth:`~Logger.info`, etc. Otherwise, +that logging event may not be handled in the desired manner. If you run the above script several times, the messages from successive runs are appended to the file *example.log*. If you want each run to start afresh, @@ -195,50 +203,6 @@ to, so the messages from earlier runs are lost. -Logging from multiple modules -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -If your program consists of multiple modules, here's an example of how you -could organize logging in it:: - - # myapp.py - import logging - import mylib - - def main(): - logging.basicConfig(filename='myapp.log', level=logging.INFO) - logging.info('Started') - mylib.do_something() - logging.info('Finished') - - if __name__ == '__main__': - main() - -:: - - # mylib.py - import logging - - def do_something(): - logging.info('Doing something') - -If you run *myapp.py*, you should see this in *myapp.log*: - -.. code-block:: none - - INFO:root:Started - INFO:root:Doing something - INFO:root:Finished - -which is hopefully what you were expecting to see. You can generalize this to -multiple modules, using the pattern in *mylib.py*. Note that for this simple -usage pattern, you won't know, by looking in the log file, *where* in your -application your messages came from, apart from looking at the event -description. If you want to track the location of your messages, you'll need -to refer to the documentation beyond the tutorial level -- see -:ref:`logging-advanced-tutorial`. 
- - Logging variable data ^^^^^^^^^^^^^^^^^^^^^ diff -Nru python3.11-3.11.8/Doc/howto/pyporting.rst python3.11-3.11.9/Doc/howto/pyporting.rst --- python3.11-3.11.8/Doc/howto/pyporting.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/howto/pyporting.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1,3 +1,5 @@ +:orphan: + .. _pyporting-howto: ************************************* @@ -6,423 +8,30 @@ :author: Brett Cannon -.. topic:: Abstract +Python 2 reached its official end-of-life at the start of 2020. This means +that no new bug reports, fixes, or changes will be made to Python 2 - it's +no longer supported: see :pep:`373` and +`status of Python versions `_. - Python 2 reached its official end-of-life at the start of 2020. This means - that no new bug reports, fixes, or changes will be made to Python 2 - it's - no longer supported. - - This guide is intended to provide you with a path to Python 3 for your - code, that includes compatibility with Python 2 as a first step. - - If you are looking to port an extension module instead of pure Python code, - please see :ref:`cporting-howto`. - - The archived python-porting_ mailing list may contain some useful guidance. - - -The Short Explanation -===================== - -To achieve Python 2/3 compatibility in a single code base, the basic steps -are: - -#. Only worry about supporting Python 2.7 -#. Make sure you have good test coverage (coverage.py_ can help; - ``python -m pip install coverage``) -#. Learn the differences between Python 2 and 3 -#. Use Futurize_ (or Modernize_) to update your code (e.g. ``python -m pip install future``) -#. Use Pylint_ to help make sure you don't regress on your Python 3 support - (``python -m pip install pylint``) -#. Use caniusepython3_ to find out which of your dependencies are blocking your - use of Python 3 (``python -m pip install caniusepython3``) -#. Once your dependencies are no longer blocking you, use continuous integration - to make sure you stay compatible with Python 2 and 3 (tox_ can help test - against multiple versions of Python; ``python -m pip install tox``) -#. Consider using optional :term:`static type checking ` - to make sure your type usage - works in both Python 2 and 3 (e.g. use mypy_ to check your typing under both - Python 2 and Python 3; ``python -m pip install mypy``). - -.. note:: - - Note: Using ``python -m pip install`` guarantees that the ``pip`` you invoke - is the one installed for the Python currently in use, whether it be - a system-wide ``pip`` or one installed within a - :ref:`virtual environment `. - -Details -======= - -Even if other factors - say, dependencies over which you have no control - -still require you to support Python 2, that does not prevent you taking the -step of including Python 3 support. - -Most changes required to support Python 3 lead to cleaner code using newer -practices even in Python 2 code. - - -Different versions of Python 2 ------------------------------- - -Ideally, your code should be compatible with Python 2.7, which was the -last supported version of Python 2. - -Some of the tools mentioned in this guide will not work with Python 2.6. - -If absolutely necessary, the six_ project can help you support Python 2.5 and -3 simultaneously. Do realize, though, that nearly all the projects listed in -this guide will not be available to you. - -If you are able to skip Python 2.5 and older, the required changes to your -code will be minimal. 
At worst you will have to use a function instead of a -method in some instances or have to import a function instead of using a -built-in one. - - -Make sure you specify the proper version support in your ``setup.py`` file --------------------------------------------------------------------------- - -In your ``setup.py`` file you should have the proper `trove classifier`_ -specifying what versions of Python you support. As your project does not support -Python 3 yet you should at least have -``Programming Language :: Python :: 2 :: Only`` specified. Ideally you should -also specify each major/minor version of Python that you do support, e.g. -``Programming Language :: Python :: 2.7``. - - -Have good test coverage ------------------------ - -Once you have your code supporting the oldest version of Python 2 you want it -to, you will want to make sure your test suite has good coverage. A good rule of -thumb is that if you want to be confident enough in your test suite that any -failures that appear after having tools rewrite your code are actual bugs in the -tools and not in your code. If you want a number to aim for, try to get over 80% -coverage (and don't feel bad if you find it hard to get better than 90% -coverage). If you don't already have a tool to measure test coverage then -coverage.py_ is recommended. - - -Be aware of the differences between Python 2 and 3 --------------------------------------------------- - -Once you have your code well-tested you are ready to begin porting your code to -Python 3! But to fully understand how your code is going to change and what -you want to look out for while you code, you will want to learn what changes -Python 3 makes in terms of Python 2. - -Some resources for understanding the differences and their implications for you -code: - -* the :ref:`"What's New" ` doc for each release of Python 3 -* the `Porting to Python 3`_ book (which is free online) -* the handy `cheat sheet`_ from the Python-Future project. - - -Update your code ----------------- - -There are tools available that can port your code automatically. - -Futurize_ does its best to make Python 3 idioms and practices exist in Python -2, e.g. backporting the ``bytes`` type from Python 3 so that you have -semantic parity between the major versions of Python. This is the better -approach for most cases. - -Modernize_, on the other hand, is more conservative and targets a Python 2/3 -subset of Python, directly relying on six_ to help provide compatibility. - -A good approach is to run the tool over your test suite first and visually -inspect the diff to make sure the transformation is accurate. After you have -transformed your test suite and verified that all the tests still pass as -expected, then you can transform your application code knowing that any tests -which fail is a translation failure. - -Unfortunately the tools can't automate everything to make your code work under -Python 3, and you will also need to read the tools' documentation in case some -options you need are turned off by default. - -Key issues to be aware of and check for: - -Division -++++++++ - -In Python 3, ``5 / 2 == 2.5`` and not ``2`` as it was in Python 2; all -division between ``int`` values result in a ``float``. This change has -actually been planned since Python 2.2 which was released in 2002. Since then -users have been encouraged to add ``from __future__ import division`` to any -and all files which use the ``/`` and ``//`` operators or to be running the -interpreter with the ``-Q`` flag. 
If you have not been doing this then you -will need to go through your code and do two things: - -#. Add ``from __future__ import division`` to your files -#. Update any division operator as necessary to either use ``//`` to use floor - division or continue using ``/`` and expect a float - -The reason that ``/`` isn't simply translated to ``//`` automatically is that if -an object defines a ``__truediv__`` method but not ``__floordiv__`` then your -code would begin to fail (e.g. a user-defined class that uses ``/`` to -signify some operation but not ``//`` for the same thing or at all). - - -Text versus binary data -+++++++++++++++++++++++ - -In Python 2 you could use the ``str`` type for both text and binary data. -Unfortunately this confluence of two different concepts could lead to brittle -code which sometimes worked for either kind of data, sometimes not. It also -could lead to confusing APIs if people didn't explicitly state that something -that accepted ``str`` accepted either text or binary data instead of one -specific type. This complicated the situation especially for anyone supporting -multiple languages as APIs wouldn't bother explicitly supporting ``unicode`` -when they claimed text data support. - -Python 3 made text and binary data distinct types that cannot simply be mixed -together. For any code that deals only with text or only binary data, this -separation doesn't pose an issue. But for code that has to deal with both, it -does mean you might have to now care about when you are using text compared -to binary data, which is why this cannot be entirely automated. - -Decide which APIs take text and which take binary (it is **highly** recommended -you don't design APIs that can take both due to the difficulty of keeping the -code working; as stated earlier it is difficult to do well). In Python 2 this -means making sure the APIs that take text can work with ``unicode`` and those -that work with binary data work with the ``bytes`` type from Python 3 -(which is a subset of ``str`` in Python 2 and acts as an alias for ``bytes`` -type in Python 2). Usually the biggest issue is realizing which methods exist -on which types in Python 2 and 3 simultaneously (for text that's ``unicode`` -in Python 2 and ``str`` in Python 3, for binary that's ``str``/``bytes`` in -Python 2 and ``bytes`` in Python 3). - -The following table lists the **unique** methods of each data type across -Python 2 and 3 (e.g., the ``decode()`` method is usable on the equivalent binary -data type in either Python 2 or 3, but it can't be used by the textual data -type consistently between Python 2 and 3 because ``str`` in Python 3 doesn't -have the method). Do note that as of Python 3.5 the ``__mod__`` method was -added to the bytes type. - -======================== ===================== -**Text data** **Binary data** ------------------------- --------------------- -\ decode ------------------------- --------------------- -encode ------------------------- --------------------- -format ------------------------- --------------------- -isdecimal ------------------------- --------------------- -isnumeric -======================== ===================== - -Making the distinction easier to handle can be accomplished by encoding and -decoding between binary data and text at the edge of your code. This means that -when you receive text in binary data, you should immediately decode it. And if -your code needs to send text as binary data then encode it as late as possible. 
-This allows your code to work with only text internally and thus eliminates -having to keep track of what type of data you are working with. - -The next issue is making sure you know whether the string literals in your code -represent text or binary data. You should add a ``b`` prefix to any -literal that presents binary data. For text you should add a ``u`` prefix to -the text literal. (There is a :mod:`__future__` import to force all unspecified -literals to be Unicode, but usage has shown it isn't as effective as adding a -``b`` or ``u`` prefix to all literals explicitly) - -You also need to be careful about opening files. Possibly you have not always -bothered to add the ``b`` mode when opening a binary file (e.g., ``rb`` for -binary reading). Under Python 3, binary files and text files are clearly -distinct and mutually incompatible; see the :mod:`io` module for details. -Therefore, you **must** make a decision of whether a file will be used for -binary access (allowing binary data to be read and/or written) or textual access -(allowing text data to be read and/or written). You should also use :func:`io.open` -for opening files instead of the built-in :func:`open` function as the :mod:`io` -module is consistent from Python 2 to 3 while the built-in :func:`open` function -is not (in Python 3 it's actually :func:`io.open`). Do not bother with the -outdated practice of using :func:`codecs.open` as that's only necessary for -keeping compatibility with Python 2.5. - -The constructors of both ``str`` and ``bytes`` have different semantics for the -same arguments between Python 2 and 3. Passing an integer to ``bytes`` in Python 2 -will give you the string representation of the integer: ``bytes(3) == '3'``. -But in Python 3, an integer argument to ``bytes`` will give you a bytes object -as long as the integer specified, filled with null bytes: -``bytes(3) == b'\x00\x00\x00'``. A similar worry is necessary when passing a -bytes object to ``str``. In Python 2 you just get the bytes object back: -``str(b'3') == b'3'``. But in Python 3 you get the string representation of the -bytes object: ``str(b'3') == "b'3'"``. - -Finally, the indexing of binary data requires careful handling (slicing does -**not** require any special handling). In Python 2, -``b'123'[1] == b'2'`` while in Python 3 ``b'123'[1] == 50``. Because binary data -is simply a collection of binary numbers, Python 3 returns the integer value for -the byte you index on. But in Python 2 because ``bytes == str``, indexing -returns a one-item slice of bytes. The six_ project has a function -named ``six.indexbytes()`` which will return an integer like in Python 3: -``six.indexbytes(b'123', 1)``. - -To summarize: - -#. Decide which of your APIs take text and which take binary data -#. Make sure that your code that works with text also works with ``unicode`` and - code for binary data works with ``bytes`` in Python 2 (see the table above - for what methods you cannot use for each type) -#. Mark all binary literals with a ``b`` prefix, textual literals with a ``u`` - prefix -#. Decode binary data to text as soon as possible, encode text as binary data as - late as possible -#. Open files using :func:`io.open` and make sure to specify the ``b`` mode when - appropriate -#. Be careful when indexing into binary data - - -Use feature detection instead of version detection -++++++++++++++++++++++++++++++++++++++++++++++++++ - -Inevitably you will have code that has to choose what to do based on what -version of Python is running. 
The best way to do this is with feature detection -of whether the version of Python you're running under supports what you need. -If for some reason that doesn't work then you should make the version check be -against Python 2 and not Python 3. To help explain this, let's look at an -example. - -Let's pretend that you need access to a feature of :mod:`importlib` that -is available in Python's standard library since Python 3.3 and available for -Python 2 through importlib2_ on PyPI. You might be tempted to write code to -access e.g. the :mod:`importlib.abc` module by doing the following:: - - import sys - - if sys.version_info[0] == 3: - from importlib import abc - else: - from importlib2 import abc - -The problem with this code is what happens when Python 4 comes out? It would -be better to treat Python 2 as the exceptional case instead of Python 3 and -assume that future Python versions will be more compatible with Python 3 than -Python 2:: - - import sys - - if sys.version_info[0] > 2: - from importlib import abc - else: - from importlib2 import abc - -The best solution, though, is to do no version detection at all and instead rely -on feature detection. That avoids any potential issues of getting the version -detection wrong and helps keep you future-compatible:: - - try: - from importlib import abc - except ImportError: - from importlib2 import abc - - -Prevent compatibility regressions ---------------------------------- - -Once you have fully translated your code to be compatible with Python 3, you -will want to make sure your code doesn't regress and stop working under -Python 3. This is especially true if you have a dependency which is blocking you -from actually running under Python 3 at the moment. - -To help with staying compatible, any new modules you create should have -at least the following block of code at the top of it:: - - from __future__ import absolute_import - from __future__ import division - from __future__ import print_function - -You can also run Python 2 with the ``-3`` flag to be warned about various -compatibility issues your code triggers during execution. If you turn warnings -into errors with ``-Werror`` then you can make sure that you don't accidentally -miss a warning. - -You can also use the Pylint_ project and its ``--py3k`` flag to lint your code -to receive warnings when your code begins to deviate from Python 3 -compatibility. This also prevents you from having to run Modernize_ or Futurize_ -over your code regularly to catch compatibility regressions. This does require -you only support Python 2.7 and Python 3.4 or newer as that is Pylint's -minimum Python version support. - - -Check which dependencies block your transition ----------------------------------------------- - -**After** you have made your code compatible with Python 3 you should begin to -care about whether your dependencies have also been ported. The caniusepython3_ -project was created to help you determine which projects --- directly or indirectly -- are blocking you from supporting Python 3. There -is both a command-line tool as well as a web interface at -https://caniusepython3.com. - -The project also provides code which you can integrate into your test suite so -that you will have a failing test when you no longer have dependencies blocking -you from using Python 3. This allows you to avoid having to manually check your -dependencies and to be notified quickly when you can start running on Python 3. 
- - -Update your ``setup.py`` file to denote Python 3 compatibility --------------------------------------------------------------- - -Once your code works under Python 3, you should update the classifiers in -your ``setup.py`` to contain ``Programming Language :: Python :: 3`` and to not -specify sole Python 2 support. This will tell anyone using your code that you -support Python 2 **and** 3. Ideally you will also want to add classifiers for -each major/minor version of Python you now support. - - -Use continuous integration to stay compatible ---------------------------------------------- - -Once you are able to fully run under Python 3 you will want to make sure your -code always works under both Python 2 and 3. Probably the best tool for running -your tests under multiple Python interpreters is tox_. You can then integrate -tox with your continuous integration system so that you never accidentally break -Python 2 or 3 support. - -You may also want to use the ``-bb`` flag with the Python 3 interpreter to -trigger an exception when you are comparing bytes to strings or bytes to an int -(the latter is available starting in Python 3.5). By default type-differing -comparisons simply return ``False``, but if you made a mistake in your -separation of text/binary data handling or indexing on bytes you wouldn't easily -find the mistake. This flag will raise an exception when these kinds of -comparisons occur, making the mistake much easier to track down. - - -Consider using optional static type checking --------------------------------------------- - -Another way to help port your code is to use a :term:`static type checker` like -mypy_ or pytype_ on your code. These tools can be used to analyze your code as -if it's being run under Python 2, then you can run the tool a second time as if -your code is running under Python 3. By running a static type checker twice like -this you can discover if you're e.g. misusing binary data type in one version -of Python compared to another. If you add optional type hints to your code you -can also explicitly state whether your APIs use textual or binary data, helping -to make sure everything functions as expected in both versions of Python. - - -.. _caniusepython3: https://pypi.org/project/caniusepython3 -.. _cheat sheet: https://python-future.org/compatible_idioms.html -.. _coverage.py: https://pypi.org/project/coverage -.. _Futurize: https://python-future.org/automatic_conversion.html -.. _importlib2: https://pypi.org/project/importlib2 -.. _Modernize: https://python-modernize.readthedocs.io/ -.. _mypy: https://mypy-lang.org/ -.. _Porting to Python 3: http://python3porting.com/ -.. _Pylint: https://pypi.org/project/pylint +If you are looking to port an extension module instead of pure Python code, +please see :ref:`cporting-howto`. -.. _Python 3 Q & A: https://ncoghlan-devs-python-notes.readthedocs.io/en/latest/python3/questions_and_answers.html +The archived python-porting_ mailing list may contain some useful guidance. -.. _pytype: https://github.com/google/pytype -.. _python-future: https://python-future.org/ -.. _python-porting: https://mail.python.org/pipermail/python-porting/ -.. _six: https://pypi.org/project/six -.. _tox: https://pypi.org/project/tox -.. _trove classifier: https://pypi.org/classifiers +Since Python 3.13 the original porting guide was discontinued. +You can find the old guide in the +`archive `_. + + +Third-party guides +================== -.. 
_Why Python 3 exists: https://snarky.ca/why-python-3-exists +There are also multiple third-party guides that might be useful: + +- `Guide by Fedora `_ +- `PyCon 2020 tutorial `_ +- `Guide by DigitalOcean `_ +- `Guide by ActiveState `_ + + +.. _python-porting: https://mail.python.org/pipermail/python-porting/ diff -Nru python3.11-3.11.8/Doc/library/abc.rst python3.11-3.11.9/Doc/library/abc.rst --- python3.11-3.11.8/Doc/library/abc.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/abc.rst 2024-04-02 08:25:04.000000000 +0000 @@ -101,11 +101,11 @@ subclass of the ABC. (This class method is called from the :meth:`~class.__subclasscheck__` method of the ABC.) - This method should return ``True``, ``False`` or ``NotImplemented``. If + This method should return ``True``, ``False`` or :data:`NotImplemented`. If it returns ``True``, the *subclass* is considered a subclass of this ABC. If it returns ``False``, the *subclass* is not considered a subclass of this ABC, even if it would normally be one. If it returns - ``NotImplemented``, the subclass check is continued with the usual + :data:`!NotImplemented`, the subclass check is continued with the usual mechanism. .. XXX explain the "usual mechanism" diff -Nru python3.11-3.11.8/Doc/library/array.rst python3.11-3.11.9/Doc/library/array.rst --- python3.11-3.11.8/Doc/library/array.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/array.rst 2024-04-02 08:25:04.000000000 +0000 @@ -273,4 +273,3 @@ `NumPy `_ The NumPy package defines another array type. - diff -Nru python3.11-3.11.8/Doc/library/ast.rst python3.11-3.11.9/Doc/library/ast.rst --- python3.11-3.11.8/Doc/library/ast.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/ast.rst 2024-04-02 08:25:04.000000000 +0000 @@ -2038,14 +2038,17 @@ modified to correspond to :pep:`484` "signature type comments", e.g. ``(str, int) -> List[str]``. - Also, setting ``feature_version`` to a tuple ``(major, minor)`` - will attempt to parse using that Python version's grammar. - Currently ``major`` must equal to ``3``. For example, setting - ``feature_version=(3, 4)`` will allow the use of ``async`` and - ``await`` as variable names. The lowest supported version is - ``(3, 4)``; the highest is ``sys.version_info[0:2]``. + Setting ``feature_version`` to a tuple ``(major, minor)`` will result in + a "best-effort" attempt to parse using that Python version's grammar. + For example, setting ``feature_version=(3, 9)`` will attempt to disallow + parsing of :keyword:`match` statements. + Currently ``major`` must equal to ``3``. The lowest supported version is + ``(3, 4)`` (and this may increase in future Python versions); + the highest is ``sys.version_info[0:2]``. "Best-effort" attempt means there + is no guarantee that the parse (or success of the parse) is the same as + when run on the Python version corresponding to ``feature_version``. - If source contains a null character ('\0'), :exc:`ValueError` is raised. + If source contains a null character (``\0``), :exc:`ValueError` is raised. .. 
warning:: Note that successfully parsing source code into an AST object doesn't diff -Nru python3.11-3.11.8/Doc/library/asyncio-eventloop.rst python3.11-3.11.9/Doc/library/asyncio-eventloop.rst --- python3.11-3.11.8/Doc/library/asyncio-eventloop.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/asyncio-eventloop.rst 2024-04-02 08:25:04.000000000 +0000 @@ -588,6 +588,9 @@ The *family*, *proto*, *flags*, *reuse_address*, *reuse_port*, *allow_broadcast*, and *sock* parameters were added. + .. versionchanged:: 3.8 + Added support for Windows. + .. versionchanged:: 3.8.1 The *reuse_address* parameter is no longer supported, as using :ref:`socket.SO_REUSEADDR ` @@ -605,11 +608,8 @@ prevents processes with differing UIDs from assigning sockets to the same socket address. - .. versionchanged:: 3.8 - Added support for Windows. - .. versionchanged:: 3.11 - The *reuse_address* parameter, disabled since Python 3.9.0, 3.8.1, + The *reuse_address* parameter, disabled since Python 3.8.1, 3.7.6 and 3.6.10, has been entirely removed. .. coroutinemethod:: loop.create_unix_connection(protocol_factory, \ diff -Nru python3.11-3.11.8/Doc/library/asyncio-protocol.rst python3.11-3.11.9/Doc/library/asyncio-protocol.rst --- python3.11-3.11.8/Doc/library/asyncio-protocol.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/asyncio-protocol.rst 2024-04-02 08:25:04.000000000 +0000 @@ -417,8 +417,8 @@ Stop the subprocess. - On POSIX systems, this method sends SIGTERM to the subprocess. - On Windows, the Windows API function TerminateProcess() is called to + On POSIX systems, this method sends :py:const:`~signal.SIGTERM` to the subprocess. + On Windows, the Windows API function :c:func:`!TerminateProcess` is called to stop the subprocess. See also :meth:`subprocess.Popen.terminate`. diff -Nru python3.11-3.11.8/Doc/library/asyncio-stream.rst python3.11-3.11.9/Doc/library/asyncio-stream.rst --- python3.11-3.11.8/Doc/library/asyncio-stream.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/asyncio-stream.rst 2024-04-02 08:25:04.000000000 +0000 @@ -347,7 +347,7 @@ be resumed. When there is nothing to wait for, the :meth:`drain` returns immediately. - .. coroutinemethod:: start_tls(sslcontext, \*, server_hostname=None, \ + .. coroutinemethod:: start_tls(sslcontext, *, server_hostname=None, \ ssl_handshake_timeout=None) Upgrade an existing stream-based connection to TLS. diff -Nru python3.11-3.11.8/Doc/library/asyncio-subprocess.rst python3.11-3.11.9/Doc/library/asyncio-subprocess.rst --- python3.11-3.11.8/Doc/library/asyncio-subprocess.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/asyncio-subprocess.rst 2024-04-02 08:25:04.000000000 +0000 @@ -235,7 +235,7 @@ .. note:: - On Windows, :py:data:`SIGTERM` is an alias for :meth:`terminate`. + On Windows, :py:const:`~signal.SIGTERM` is an alias for :meth:`terminate`. ``CTRL_C_EVENT`` and ``CTRL_BREAK_EVENT`` can be sent to processes started with a *creationflags* parameter which includes ``CREATE_NEW_PROCESS_GROUP``. @@ -244,10 +244,10 @@ Stop the child process. - On POSIX systems this method sends :py:const:`signal.SIGTERM` to the + On POSIX systems this method sends :py:const:`~signal.SIGTERM` to the child process. - On Windows the Win32 API function :c:func:`TerminateProcess` is + On Windows the Win32 API function :c:func:`!TerminateProcess` is called to stop the child process. .. 
method:: kill() diff -Nru python3.11-3.11.8/Doc/library/audit_events.rst python3.11-3.11.9/Doc/library/audit_events.rst --- python3.11-3.11.8/Doc/library/audit_events.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/audit_events.rst 2024-04-02 08:25:04.000000000 +0000 @@ -7,7 +7,7 @@ This table contains all events raised by :func:`sys.audit` or :c:func:`PySys_Audit` calls throughout the CPython runtime and the -standard library. These calls were added in 3.8.0 or later (see :pep:`578`). +standard library. These calls were added in 3.8 or later (see :pep:`578`). See :func:`sys.addaudithook` and :c:func:`PySys_AddAuditHook` for information on handling these events. diff -Nru python3.11-3.11.8/Doc/library/bdb.rst python3.11-3.11.9/Doc/library/bdb.rst --- python3.11-3.11.8/Doc/library/bdb.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/bdb.rst 2024-04-02 08:25:04.000000000 +0000 @@ -148,8 +148,8 @@ .. method:: reset() - Set the :attr:`botframe`, :attr:`stopframe`, :attr:`returnframe` and - :attr:`quitting` attributes with values ready to start debugging. + Set the :attr:`!botframe`, :attr:`!stopframe`, :attr:`!returnframe` and + :attr:`quitting ` attributes with values ready to start debugging. .. method:: trace_dispatch(frame, event, arg) @@ -182,7 +182,7 @@ If the debugger should stop on the current line, invoke the :meth:`user_line` method (which should be overridden in subclasses). - Raise a :exc:`BdbQuit` exception if the :attr:`Bdb.quitting` flag is set + Raise a :exc:`BdbQuit` exception if the :attr:`quitting ` flag is set (which can be set from :meth:`user_line`). Return a reference to the :meth:`trace_dispatch` method for further tracing in that scope. @@ -190,7 +190,7 @@ If the debugger should stop on this function call, invoke the :meth:`user_call` method (which should be overridden in subclasses). - Raise a :exc:`BdbQuit` exception if the :attr:`Bdb.quitting` flag is set + Raise a :exc:`BdbQuit` exception if the :attr:`quitting ` flag is set (which can be set from :meth:`user_call`). Return a reference to the :meth:`trace_dispatch` method for further tracing in that scope. @@ -198,7 +198,7 @@ If the debugger should stop on this function return, invoke the :meth:`user_return` method (which should be overridden in subclasses). - Raise a :exc:`BdbQuit` exception if the :attr:`Bdb.quitting` flag is set + Raise a :exc:`BdbQuit` exception if the :attr:`quitting ` flag is set (which can be set from :meth:`user_return`). Return a reference to the :meth:`trace_dispatch` method for further tracing in that scope. @@ -206,7 +206,7 @@ If the debugger should stop at this exception, invokes the :meth:`user_exception` method (which should be overridden in subclasses). - Raise a :exc:`BdbQuit` exception if the :attr:`Bdb.quitting` flag is set + Raise a :exc:`BdbQuit` exception if the :attr:`quitting ` flag is set (which can be set from :meth:`user_exception`). Return a reference to the :meth:`trace_dispatch` method for further tracing in that scope. @@ -293,7 +293,9 @@ .. method:: set_quit() - Set the :attr:`quitting` attribute to ``True``. This raises :exc:`BdbQuit` in + .. index:: single: quitting (bdb.Bdb attribute) + + Set the :attr:`!quitting` attribute to ``True``. This raises :exc:`BdbQuit` in the next call to one of the :meth:`!dispatch_\*` methods. @@ -383,7 +385,7 @@ .. method:: run(cmd, globals=None, locals=None) Debug a statement executed via the :func:`exec` function. 
*globals* - defaults to :attr:`__main__.__dict__`, *locals* defaults to *globals*. + defaults to :attr:`!__main__.__dict__`, *locals* defaults to *globals*. .. method:: runeval(expr, globals=None, locals=None) diff -Nru python3.11-3.11.8/Doc/library/bz2.rst python3.11-3.11.9/Doc/library/bz2.rst --- python3.11-3.11.8/Doc/library/bz2.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/bz2.rst 2024-04-02 08:25:04.000000000 +0000 @@ -156,7 +156,6 @@ Support was added for *filename* being a :term:`file object` instead of an actual filename. - .. versionchanged:: 3.3 The ``'a'`` (append) mode was added, along with support for reading multi-stream files. diff -Nru python3.11-3.11.8/Doc/library/codecs.rst python3.11-3.11.9/Doc/library/codecs.rst --- python3.11-3.11.8/Doc/library/codecs.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/codecs.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1542,13 +1542,13 @@ .. availability:: Windows. -.. versionchanged:: 3.3 - Support any error handler. - .. versionchanged:: 3.2 Before 3.2, the *errors* argument was ignored; ``'replace'`` was always used to encode, and ``'ignore'`` to decode. +.. versionchanged:: 3.3 + Support any error handler. + :mod:`encodings.utf_8_sig` --- UTF-8 codec with BOM signature ------------------------------------------------------------- diff -Nru python3.11-3.11.8/Doc/library/collections.rst python3.11-3.11.9/Doc/library/collections.rst --- python3.11-3.11.8/Doc/library/collections.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/collections.rst 2024-04-02 08:25:04.000000000 +0000 @@ -342,7 +342,7 @@ All of those tests treat missing elements as having zero counts so that ``Counter(a=1) == Counter(a=1, b=0)`` returns true. -.. versionadded:: 3.10 +.. versionchanged:: 3.10 Rich comparison operations were added. .. versionchanged:: 3.10 diff -Nru python3.11-3.11.8/Doc/library/constants.rst python3.11-3.11.9/Doc/library/constants.rst --- python3.11-3.11.8/Doc/library/constants.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/constants.rst 2024-04-02 08:25:04.000000000 +0000 @@ -33,27 +33,27 @@ the other type; may be returned by the in-place binary special methods (e.g. :meth:`~object.__imul__`, :meth:`~object.__iand__`, etc.) for the same purpose. It should not be evaluated in a boolean context. - ``NotImplemented`` is the sole instance of the :data:`types.NotImplementedType` type. + :data:`!NotImplemented` is the sole instance of the :data:`types.NotImplementedType` type. .. note:: - When a binary (or in-place) method returns ``NotImplemented`` the + When a binary (or in-place) method returns :data:`!NotImplemented` the interpreter will try the reflected operation on the other type (or some other fallback, depending on the operator). If all attempts return - ``NotImplemented``, the interpreter will raise an appropriate exception. - Incorrectly returning ``NotImplemented`` will result in a misleading - error message or the ``NotImplemented`` value being returned to Python code. + :data:`!NotImplemented`, the interpreter will raise an appropriate exception. + Incorrectly returning :data:`!NotImplemented` will result in a misleading + error message or the :data:`!NotImplemented` value being returned to Python code. See :ref:`implementing-the-arithmetic-operations` for examples. .. 
note:: - ``NotImplementedError`` and ``NotImplemented`` are not interchangeable, + ``NotImplementedError`` and :data:`!NotImplemented` are not interchangeable, even though they have similar names and purposes. See :exc:`NotImplementedError` for details on when to use it. .. versionchanged:: 3.9 - Evaluating ``NotImplemented`` in a boolean context is deprecated. While + Evaluating :data:`!NotImplemented` in a boolean context is deprecated. While it currently evaluates as true, it will emit a :exc:`DeprecationWarning`. It will raise a :exc:`TypeError` in a future version of Python. diff -Nru python3.11-3.11.8/Doc/library/ctypes.rst python3.11-3.11.9/Doc/library/ctypes.rst --- python3.11-3.11.8/Doc/library/ctypes.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/ctypes.rst 2024-04-02 08:25:04.000000000 +0000 @@ -92,7 +92,6 @@ Functions are accessed as attributes of dll objects:: - >>> from ctypes import * >>> libc.printf <_FuncPtr object at 0x...> >>> print(windll.kernel32.GetModuleHandleA) # doctest: +WINDOWS diff -Nru python3.11-3.11.8/Doc/library/dataclasses.rst python3.11-3.11.9/Doc/library/dataclasses.rst --- python3.11-3.11.8/Doc/library/dataclasses.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/dataclasses.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1,5 +1,5 @@ -:mod:`dataclasses` --- Data Classes -=================================== +:mod:`!dataclasses` --- Data Classes +==================================== .. module:: dataclasses :synopsis: Generate special methods on user-defined classes. @@ -31,7 +31,7 @@ def total_cost(self) -> float: return self.unit_price * self.quantity_on_hand -will add, among other things, a :meth:`~object.__init__` that looks like:: +will add, among other things, a :meth:`!__init__` that looks like:: def __init__(self, name: str, unit_price: float, quantity_on_hand: int = 0): self.name = name @@ -49,26 +49,26 @@ .. decorator:: dataclass(*, init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, match_args=True, kw_only=False, slots=False, weakref_slot=False) This function is a :term:`decorator` that is used to add generated - :term:`special method`\s to classes, as described below. + :term:`special methods ` to classes, as described below. - The :func:`dataclass` decorator examines the class to find + The ``@dataclass`` decorator examines the class to find ``field``\s. A ``field`` is defined as a class variable that has a :term:`type annotation `. With two - exceptions described below, nothing in :func:`dataclass` + exceptions described below, nothing in ``@dataclass`` examines the type specified in the variable annotation. The order of the fields in all of the generated methods is the order in which they appear in the class definition. - The :func:`dataclass` decorator will add various "dunder" methods to + The ``@dataclass`` decorator will add various "dunder" methods to the class, described below. If any of the added methods already exist in the class, the behavior depends on the parameter, as documented below. The decorator returns the same class that it is called on; no new class is created. - If :func:`dataclass` is used just as a simple decorator with no parameters, + If ``@dataclass`` is used just as a simple decorator with no parameters, it acts as if it has the default values documented in this - signature. That is, these three uses of :func:`dataclass` are + signature. That is, these three uses of ``@dataclass`` are equivalent:: @dataclass @@ -84,12 +84,12 @@ class C: ... 
- The parameters to :func:`dataclass` are: + The parameters to ``@dataclass`` are: - ``init``: If true (the default), a :meth:`~object.__init__` method will be generated. - If the class already defines :meth:`~object.__init__`, this parameter is + If the class already defines :meth:`!__init__`, this parameter is ignored. - ``repr``: If true (the default), a :meth:`~object.__repr__` method will be @@ -99,7 +99,7 @@ are not included. For example: ``InventoryItem(name='widget', unit_price=3.0, quantity_on_hand=10)``. - If the class already defines :meth:`~object.__repr__`, this parameter is + If the class already defines :meth:`!__repr__`, this parameter is ignored. - ``eq``: If true (the default), an :meth:`~object.__eq__` method will be @@ -107,7 +107,7 @@ of its fields, in order. Both instances in the comparison must be of the identical type. - If the class already defines :meth:`~object.__eq__`, this parameter is + If the class already defines :meth:`!__eq__`, this parameter is ignored. - ``order``: If true (the default is ``False``), :meth:`~object.__lt__`, @@ -117,43 +117,43 @@ identical type. If ``order`` is true and ``eq`` is false, a :exc:`ValueError` is raised. - If the class already defines any of :meth:`~object.__lt__`, - :meth:`~object.__le__`, :meth:`~object.__gt__`, or :meth:`~object.__ge__`, then + If the class already defines any of :meth:`!__lt__`, + :meth:`!__le__`, :meth:`!__gt__`, or :meth:`!__ge__`, then :exc:`TypeError` is raised. - ``unsafe_hash``: If ``False`` (the default), a :meth:`~object.__hash__` method is generated according to how ``eq`` and ``frozen`` are set. - :meth:`~object.__hash__` is used by built-in :meth:`hash()`, and when objects are + :meth:`!__hash__` is used by built-in :meth:`hash()`, and when objects are added to hashed collections such as dictionaries and sets. Having a - :meth:`~object.__hash__` implies that instances of the class are immutable. + :meth:`!__hash__` implies that instances of the class are immutable. Mutability is a complicated property that depends on the programmer's - intent, the existence and behavior of :meth:`~object.__eq__`, and the values of - the ``eq`` and ``frozen`` flags in the :func:`dataclass` decorator. + intent, the existence and behavior of :meth:`!__eq__`, and the values of + the ``eq`` and ``frozen`` flags in the ``@dataclass`` decorator. - By default, :func:`dataclass` will not implicitly add a :meth:`~object.__hash__` + By default, ``@dataclass`` will not implicitly add a :meth:`~object.__hash__` method unless it is safe to do so. Neither will it add or change an - existing explicitly defined :meth:`~object.__hash__` method. Setting the class + existing explicitly defined :meth:`!__hash__` method. Setting the class attribute ``__hash__ = None`` has a specific meaning to Python, as - described in the :meth:`~object.__hash__` documentation. + described in the :meth:`!__hash__` documentation. - If :meth:`~object.__hash__` is not explicitly defined, or if it is set to ``None``, - then :func:`dataclass` *may* add an implicit :meth:`~object.__hash__` method. - Although not recommended, you can force :func:`dataclass` to create a - :meth:`~object.__hash__` method with ``unsafe_hash=True``. This might be the case - if your class is logically immutable but can nonetheless be mutated. + If :meth:`!__hash__` is not explicitly defined, or if it is set to ``None``, + then ``@dataclass`` *may* add an implicit :meth:`!__hash__` method. 
+ Although not recommended, you can force ``@dataclass`` to create a + :meth:`!__hash__` method with ``unsafe_hash=True``. This might be the case + if your class is logically immutable but can still be mutated. This is a specialized use case and should be considered carefully. - Here are the rules governing implicit creation of a :meth:`~object.__hash__` - method. Note that you cannot both have an explicit :meth:`~object.__hash__` + Here are the rules governing implicit creation of a :meth:`!__hash__` + method. Note that you cannot both have an explicit :meth:`!__hash__` method in your dataclass and set ``unsafe_hash=True``; this will result in a :exc:`TypeError`. - If ``eq`` and ``frozen`` are both true, by default :func:`dataclass` will - generate a :meth:`~object.__hash__` method for you. If ``eq`` is true and - ``frozen`` is false, :meth:`~object.__hash__` will be set to ``None``, marking it + If ``eq`` and ``frozen`` are both true, by default ``@dataclass`` will + generate a :meth:`!__hash__` method for you. If ``eq`` is true and + ``frozen`` is false, :meth:`!__hash__` will be set to ``None``, marking it unhashable (which it is, since it is mutable). If ``eq`` is false, - :meth:`~object.__hash__` will be left untouched meaning the :meth:`~object.__hash__` + :meth:`!__hash__` will be left untouched meaning the :meth:`!__hash__` method of the superclass will be used (if the superclass is :class:`object`, this means it will fall back to id-based hashing). @@ -165,7 +165,7 @@ - ``match_args``: If true (the default is ``True``), the ``__match_args__`` tuple will be created from the list of parameters to the generated :meth:`~object.__init__` method (even if - :meth:`~object.__init__` is not generated, see above). If false, or if + :meth:`!__init__` is not generated, see above). If false, or if ``__match_args__`` is already defined in the class, then ``__match_args__`` will not be generated. @@ -175,7 +175,7 @@ fields will be marked as keyword-only. If a field is marked as keyword-only, then the only effect is that the :meth:`~object.__init__` parameter generated from a keyword-only field must be specified - with a keyword when :meth:`~object.__init__` is called. There is no + with a keyword when :meth:`!__init__` is called. There is no effect on any other aspect of dataclasses. See the :term:`parameter` glossary entry for details. Also see the :const:`KW_ONLY` section. @@ -184,7 +184,7 @@ - ``slots``: If true (the default is ``False``), :attr:`~object.__slots__` attribute will be generated and new class will be returned instead of the original one. - If :attr:`~object.__slots__` is already defined in the class, then :exc:`TypeError` + If :attr:`!__slots__` is already defined in the class, then :exc:`TypeError` is raised. .. versionadded:: 3.10 @@ -229,7 +229,7 @@ required. There are, however, some dataclass features that require additional per-field information. To satisfy this need for additional information, you can replace the default field value - with a call to the provided :func:`field` function. For example:: + with a call to the provided :func:`!field` function. For example:: @dataclass class C: @@ -243,10 +243,10 @@ used because ``None`` is a valid value for some parameters with a distinct meaning. No code should directly use the :const:`MISSING` value. - The parameters to :func:`field` are: + The parameters to :func:`!field` are: - ``default``: If provided, this will be the default value for this - field. This is needed because the :meth:`field` call itself + field. 
This is needed because the :func:`!field` call itself replaces the normal position of the default value. - ``default_factory``: If provided, it must be a zero-argument @@ -293,10 +293,10 @@ .. versionadded:: 3.10 If the default value of a field is specified by a call to - :func:`field()`, then the class attribute for this field will be + :func:`!field`, then the class attribute for this field will be replaced by the specified ``default`` value. If no ``default`` is provided, then the class attribute will be deleted. The intent is - that after the :func:`dataclass` decorator runs, the class + that after the :func:`@dataclass ` decorator runs, the class attributes will all contain the default values for the fields, just as if the default value itself were specified. For example, after:: @@ -314,10 +314,10 @@ .. class:: Field - :class:`Field` objects describe each defined field. These objects + :class:`!Field` objects describe each defined field. These objects are created internally, and are returned by the :func:`fields` module-level method (see below). Users should never instantiate a - :class:`Field` object directly. Its documented attributes are: + :class:`!Field` object directly. Its documented attributes are: - ``name``: The name of the field. - ``type``: The type of the field. @@ -343,7 +343,7 @@ lists, and tuples are recursed into. Other objects are copied with :func:`copy.deepcopy`. - Example of using :func:`asdict` on nested dataclasses:: + Example of using :func:`!asdict` on nested dataclasses:: @dataclass class Point: @@ -364,7 +364,7 @@ dict((field.name, getattr(obj, field.name)) for field in fields(obj)) - :func:`asdict` raises :exc:`TypeError` if ``obj`` is not a dataclass + :func:`!asdict` raises :exc:`TypeError` if ``obj`` is not a dataclass instance. .. function:: astuple(obj, *, tuple_factory=tuple) @@ -384,7 +384,7 @@ tuple(getattr(obj, field.name) for field in dataclasses.fields(obj)) - :func:`astuple` raises :exc:`TypeError` if ``obj`` is not a dataclass + :func:`!astuple` raises :exc:`TypeError` if ``obj`` is not a dataclass instance. .. function:: make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, match_args=True, kw_only=False, slots=False, weakref_slot=False) @@ -397,11 +397,11 @@ ``typing.Any`` is used for ``type``. The values of ``init``, ``repr``, ``eq``, ``order``, ``unsafe_hash``, ``frozen``, ``match_args``, ``kw_only``, ``slots``, and ``weakref_slot`` have - the same meaning as they do in :func:`dataclass`. + the same meaning as they do in :func:`@dataclass `. This function is not strictly required, because any Python mechanism for creating a new class with ``__annotations__`` can - then apply the :func:`dataclass` function to convert that class to + then apply the ``@dataclass`` function to convert that class to a dataclass. This function is provided as a convenience. For example:: @@ -434,16 +434,16 @@ :ref:`__post_init__ `, if present, is also called. Init-only variables without default values, if any exist, must be - specified on the call to :func:`replace` so that they can be passed to - :meth:`~object.__init__` and :ref:`__post_init__ `. + specified on the call to :func:`!replace` so that they can be passed to + :meth:`!__init__` and :meth:`__post_init__`. It is an error for ``changes`` to contain any fields that are defined as having ``init=False``. A :exc:`ValueError` will be raised in this case. 
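To make the behaviour concrete, here is a minimal, hedged sketch of :func:`!replace` on a small hypothetical dataclass (the class ``C`` below is illustrative only, not part of the module)::

    from dataclasses import dataclass, replace

    @dataclass
    class C:
        x: int
        y: int = 0

    c1 = C(x=1, y=2)
    c2 = replace(c1, y=5)        # new instance with only y changed
    assert c2 == C(x=1, y=5)
    assert c1 == C(x=1, y=2)     # the original object is untouched

Fields not named in ``changes`` keep the values they already have on the source object.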
Be forewarned about how ``init=False`` fields work during a call to - :func:`replace`. They are not copied from the source object, but - rather are initialized in :ref:`__post_init__ `, if they're + :func:`!replace`. They are not copied from the source object, but + rather are initialized in :meth:`__post_init__`, if they're initialized at all. It is expected that ``init=False`` fields will be rarely and judiciously used. If they are used, it might be wise to have alternate class constructors, or perhaps a custom @@ -469,11 +469,11 @@ .. data:: KW_ONLY A sentinel value used as a type annotation. Any fields after a - pseudo-field with the type of :const:`KW_ONLY` are marked as + pseudo-field with the type of :const:`!KW_ONLY` are marked as keyword-only fields. Note that a pseudo-field of type - :const:`KW_ONLY` is otherwise completely ignored. This includes the + :const:`!KW_ONLY` is otherwise completely ignored. This includes the name of such a field. By convention, a name of ``_`` is used for a - :const:`KW_ONLY` field. Keyword-only fields signify + :const:`!KW_ONLY` field. Keyword-only fields signify :meth:`~object.__init__` parameters that must be specified as keywords when the class is instantiated. @@ -489,7 +489,7 @@ p = Point(0, y=1.5, z=2.0) In a single dataclass, it is an error to specify more than one - field whose type is :const:`KW_ONLY`. + field whose type is :const:`!KW_ONLY`. .. versionadded:: 3.10 @@ -512,11 +512,16 @@ class. If no :meth:`~object.__init__` method is generated, then :meth:`!__post_init__` will not automatically be called. -Among other uses, this allows for initializing field values that -depend on one or more other fields. For example:: + When defined on the class, it will be called by the generated + :meth:`~object.__init__`, normally as ``self.__post_init__()``. + However, if any ``InitVar`` fields are defined, they will also be + passed to :meth:`!__post_init__` in the order they were defined in the + class. If no :meth:`!__init__` method is generated, then + :meth:`!__post_init__` will not automatically be called. @dataclass class C: + a: float b: float c: float = field(init=False) @@ -524,8 +529,8 @@ def __post_init__(self): self.c = self.a + self.b -The :meth:`~object.__init__` method generated by :func:`dataclass` does not call base -class :meth:`~object.__init__` methods. If the base class has an :meth:`~object.__init__` method +The :meth:`~object.__init__` method generated by :func:`@dataclass ` does not call base +class :meth:`!__init__` methods. If the base class has an :meth:`!__init__` method that has to be called, it is common to call this method in a :meth:`!__post_init__` method:: @@ -541,7 +546,7 @@ def __post_init__(self): super().__init__(self.side, self.side) -Note, however, that in general the dataclass-generated :meth:`~object.__init__` methods +Note, however, that in general the dataclass-generated :meth:`!__init__` methods don't need to be called, since the derived dataclass will take care of initializing all fields of any base class that is a dataclass itself. @@ -552,7 +557,7 @@ Class variables --------------- -One of the few places where :func:`dataclass` actually inspects the type +One of the few places where :func:`@dataclass ` actually inspects the type of a field is to determine if a field is a class variable as defined in :pep:`526`. It does this by checking if the type of the field is ``typing.ClassVar``. 
If a field is a ``ClassVar``, it is excluded @@ -563,7 +568,7 @@ Init-only variables ------------------- -Another place where :func:`dataclass` inspects a type annotation is to +Another place where :func:`@dataclass ` inspects a type annotation is to determine if a field is an init-only variable. It does this by seeing if the type of a field is of type ``dataclasses.InitVar``. If a field is an ``InitVar``, it is considered a pseudo-field called an init-only @@ -595,19 +600,19 @@ ---------------- It is not possible to create truly immutable Python objects. However, -by passing ``frozen=True`` to the :meth:`dataclass` decorator you can +by passing ``frozen=True`` to the :func:`@dataclass ` decorator you can emulate immutability. In that case, dataclasses will add :meth:`~object.__setattr__` and :meth:`~object.__delattr__` methods to the class. These methods will raise a :exc:`FrozenInstanceError` when invoked. There is a tiny performance penalty when using ``frozen=True``: :meth:`~object.__init__` cannot use simple assignment to initialize fields, and -must use :meth:`!object.__setattr__`. +must use :meth:`!__setattr__`. Inheritance ----------- -When the dataclass is being created by the :meth:`dataclass` decorator, +When the dataclass is being created by the :func:`@dataclass ` decorator, it looks through all of the class's base classes in reverse MRO (that is, starting at :class:`object`) and, for each dataclass that it finds, adds the fields from that base class to an ordered mapping of fields. @@ -634,8 +639,8 @@ def __init__(self, x: int = 15, y: int = 0, z: int = 10): -Re-ordering of keyword-only parameters in :meth:`~object.__init__` ------------------------------------------------------------------- +Re-ordering of keyword-only parameters in :meth:`!__init__` +----------------------------------------------------------- After the parameters needed for :meth:`~object.__init__` are computed, any keyword-only parameters are moved to come after all regular @@ -658,7 +663,7 @@ z: int = 10 t: int = field(kw_only=True, default=0) -The generated :meth:`~object.__init__` method for ``D`` will look like:: +The generated :meth:`!__init__` method for ``D`` will look like:: def __init__(self, x: Any = 15.0, z: int = 10, *, y: int = 0, w: int = 1, t: int = 0): @@ -667,7 +672,7 @@ followed by parameters derived from keyword-only fields. The relative ordering of keyword-only parameters is maintained in the -re-ordered :meth:`~object.__init__` parameter list. +re-ordered :meth:`!__init__` parameter list. Default factory functions @@ -682,7 +687,7 @@ If a field is excluded from :meth:`~object.__init__` (using ``init=False``) and the field also specifies ``default_factory``, then the default factory function will always be called from the generated -:meth:`~object.__init__` function. This happens because there is no other +:meth:`!__init__` function. This happens because there is no other way to give the field an initial value. Mutable default values @@ -712,7 +717,7 @@ class D: x: list = [] # This code raises ValueError def add(self, element): - self.x += element + self.x.append(element) it would generate code similar to:: @@ -721,7 +726,7 @@ def __init__(self, x=x): self.x = x def add(self, element): - self.x += element + self.x.append(element) assert D().x is D().x @@ -731,7 +736,7 @@ of ``x``. Because dataclasses just use normal Python class creation they also share this behavior. There is no general way for Data Classes to detect this condition. 
Instead, the -:func:`dataclass` decorator will raise a :exc:`ValueError` if it +:func:`@dataclass ` decorator will raise a :exc:`ValueError` if it detects an unhashable default parameter. The assumption is that if a value is unhashable, it is mutable. This is a partial solution, but it does protect against many common errors. @@ -757,15 +762,17 @@ Fields that are assigned :ref:`descriptor objects ` as their default value have the following special behaviors: -* The value for the field passed to the dataclass's ``__init__`` method is - passed to the descriptor's ``__set__`` method rather than overwriting the +* The value for the field passed to the dataclass's :meth:`~object.__init__` method is + passed to the descriptor's :meth:`~object.__set__` method rather than overwriting the descriptor object. + * Similarly, when getting or setting the field, the descriptor's - ``__get__`` or ``__set__`` method is called rather than returning or + :meth:`~object.__get__` or :meth:`!__set__` method is called rather than returning or overwriting the descriptor object. -* To determine whether a field contains a default value, ``dataclasses`` - will call the descriptor's ``__get__`` method using its class access - form (i.e. ``descriptor.__get__(obj=None, type=cls)``. If the + +* To determine whether a field contains a default value, :func:`@dataclass ` + will call the descriptor's :meth:`!__get__` method using its class access + form: ``descriptor.__get__(obj=None, type=cls)``. If the descriptor returns a value in this case, it will be used as the field's default. On the other hand, if the descriptor raises :exc:`AttributeError` in this situation, no default value will be diff -Nru python3.11-3.11.8/Doc/library/datetime.rst python3.11-3.11.9/Doc/library/datetime.rst --- python3.11-3.11.8/Doc/library/datetime.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/datetime.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1185,6 +1185,9 @@ that are not also :class:`!datetime` instances, even if they represent the same date. + If both comparands are aware, and have the same :attr:`!tzinfo` attribute, + the :attr:`!tzinfo` and :attr:`~.datetime.fold` attributes are ignored and + the base datetimes are compared. If both comparands are aware and have different :attr:`~.datetime.tzinfo` attributes, the comparison acts as comparands were first converted to UTC datetimes except that the implementation never overflows. @@ -1199,6 +1202,9 @@ as well as a :class:`!datetime` object and a :class:`!date` object that is not also a :class:`!datetime` instance, raises :exc:`TypeError`. + If both comparands are aware, and have the same :attr:`!tzinfo` attribute, + the :attr:`!tzinfo` and :attr:`~.datetime.fold` attributes are ignored and + the base datetimes are compared. If both comparands are aware and have different :attr:`~.datetime.tzinfo` attributes, the comparison acts as comparands were first converted to UTC datetimes except that the implementation never overflows. @@ -1744,8 +1750,8 @@ Order comparison between naive and aware :class:`!time` objects raises :exc:`TypeError`. -If both comparands are aware, and have -the same :attr:`~.time.tzinfo` attribute, the common :attr:`!tzinfo` attribute is +If both comparands are aware, and have the same :attr:`~.time.tzinfo` +attribute, the :attr:`!tzinfo` and :attr:`!fold` attributes are ignored and the base times are compared. 
If both comparands are aware and have different :attr:`!tzinfo` attributes, the comparands are first adjusted by subtracting their UTC offsets (obtained from ``self.utcoffset()``). @@ -1777,7 +1783,7 @@ be truncated). 4. Fractional hours and minutes are not supported. - Examples:: + Examples: .. doctest:: diff -Nru python3.11-3.11.8/Doc/library/dbm.rst python3.11-3.11.9/Doc/library/dbm.rst --- python3.11-3.11.8/Doc/library/dbm.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/dbm.rst 2024-04-02 08:25:04.000000000 +0000 @@ -56,10 +56,6 @@ The Unix file access mode of the file (default: octal ``0o666``), used only when the database has to be created. -.. |incompat_note| replace:: - The file formats created by :mod:`dbm.gnu` and :mod:`dbm.ndbm` are incompatible - and can not be used interchangeably. - .. function:: open(file, flag='r', mode=0o666) Open a database and return the corresponding database object. @@ -160,11 +156,10 @@ library, similar to the :mod:`dbm.ndbm` module, but with additional functionality like crash tolerance. -:class:`!gdbm` objects behave similar to :term:`mappings `, -except that keys and values are always converted to :class:`bytes` before storing, -and the :meth:`!items` and :meth:`!values` methods are not supported. +.. note:: -.. note:: |incompat_note| + The file formats created by :mod:`dbm.gnu` and :mod:`dbm.ndbm` are incompatible + and can not be used interchangeably. .. exception:: error @@ -211,8 +206,9 @@ A string of characters the *flag* parameter of :meth:`~dbm.gnu.open` supports. - In addition to the dictionary-like methods, :class:`gdbm` objects have the - following methods and attributes: + :class:`!gdbm` objects behave similar to :term:`mappings `, + but :meth:`!items` and :meth:`!values` methods are not supported. + The following methods are also provided: .. method:: gdbm.firstkey() @@ -263,14 +259,13 @@ The :mod:`dbm.ndbm` module provides an interface to the :abbr:`NDBM (New Database Manager)` library. -:class:`!ndbm` objects behave similar to :term:`mappings `, -except that keys and values are always stored as :class:`bytes`, -and the :meth:`!items` and :meth:`!values` methods are not supported. - This module can be used with the "classic" NDBM interface or the :abbr:`GDBM (GNU dbm)` compatibility interface. -.. note:: |incompat_note| +.. note:: + + The file formats created by :mod:`dbm.gnu` and :mod:`dbm.ndbm` are incompatible + and can not be used interchangeably. .. warning:: @@ -308,8 +303,9 @@ :param int mode: |mode_param_doc| - In addition to the dictionary-like methods, :class:`!ndbm` objects - provide the following method: + :class:`!ndbm` objects behave similar to :term:`mappings `, + but :meth:`!items` and :meth:`!values` methods are not supported. + The following methods are also provided: .. versionchanged:: 3.11 Accepts :term:`path-like object` for filename. @@ -342,8 +338,6 @@ interface which is written entirely in Python. Unlike other :mod:`dbm` backends, such as :mod:`dbm.gnu`, no external library is required. -As with other :mod:`dbm` backends, -the keys and values are always stored as :class:`bytes`. 
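As a rough sketch of the mapping-style, bytes-based access described for the :mod:`dbm` backends (the database file name below is an arbitrary assumption)::

    import dbm.dumb

    with dbm.dumb.open('example_db', 'c') as db:   # 'c' creates the database if needed
        db['greeting'] = 'hello'    # str keys and values are encoded to bytes
        print(db[b'greeting'])      # b'hello' -- always stored and returned as bytes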
The :mod:`!dbm.dumb` module defines the following: diff -Nru python3.11-3.11.8/Doc/library/decimal.rst python3.11-3.11.9/Doc/library/decimal.rst --- python3.11-3.11.8/Doc/library/decimal.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/decimal.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1517,7 +1517,7 @@ the C version uses a thread-local rather than a coroutine-local context and the value is ``False``. This is slightly faster in some nested context scenarios. -.. versionadded:: 3.9 backported to 3.7 and 3.8. +.. versionadded:: 3.8.3 Rounding modes diff -Nru python3.11-3.11.8/Doc/library/enum.rst python3.11-3.11.9/Doc/library/enum.rst --- python3.11-3.11.8/Doc/library/enum.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/enum.rst 2024-04-02 08:25:04.000000000 +0000 @@ -270,6 +270,8 @@ >>> Color.RED.value 1 + Value of the member, can be set in :meth:`~object.__new__`. + .. note:: Enum member values Member values can be anything: :class:`int`, :class:`str`, etc. If @@ -277,6 +279,24 @@ appropriate value will be chosen for you. See :class:`auto` for the details. + While mutable/unhashable values, such as :class:`dict`, :class:`list` or + a mutable :class:`~dataclasses.dataclass`, can be used, they will have a + quadratic performance impact during creation relative to the + total number of mutable/unhashable values in the enum. + + .. attribute:: Enum._name_ + + Name of the member. + + .. attribute:: Enum._value_ + + Value of the member, can be set in :meth:`~object.__new__`. + + .. attribute:: Enum._order_ + + No longer used, kept for backward compatibility. + (class attribute, removed during class creation). + .. attribute:: Enum._ignore_ ``_ignore_`` is only used during creation and is removed from the @@ -493,9 +513,7 @@ >>> list(purple) [, ] - .. versionchanged:: 3.11 - - Aliases are no longer returned during iteration. + .. versionadded:: 3.11 .. method:: __len__(self): @@ -768,18 +786,17 @@ Supported ``_sunder_`` names """""""""""""""""""""""""""" -- ``_name_`` -- name of the member -- ``_value_`` -- value of the member; can be set / modified in ``__new__`` - -- ``_missing_`` -- a lookup function used when a value is not found; may be - overridden -- ``_ignore_`` -- a list of names, either as a :class:`list` or a :class:`str`, - that will not be transformed into members, and will be removed from the final - class -- ``_order_`` -- used in Python 2/3 code to ensure member order is consistent - (class attribute, removed during class creation) -- ``_generate_next_value_`` -- used to get an appropriate value for an enum - member; may be overridden +- :attr:`~Enum._name_` -- name of the member +- :attr:`~Enum._value_` -- value of the member; can be set in ``__new__`` +- :meth:`~Enum._missing_` -- a lookup function used when a value is not found; + may be overridden +- :attr:`~Enum._ignore_` -- a list of names, either as a :class:`list` or a + :class:`str`, that will not be transformed into members, and will be removed + from the final class +- :attr:`~Enum._order_` -- no longer used, kept for backward + compatibility (class attribute, removed during class creation) +- :meth:`~Enum._generate_next_value_` -- used to get an appropriate value for + an enum member; may be overridden .. 
note:: diff -Nru python3.11-3.11.8/Doc/library/exceptions.rst python3.11-3.11.9/Doc/library/exceptions.rst --- python3.11-3.11.8/Doc/library/exceptions.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/exceptions.rst 2024-04-02 08:25:04.000000000 +0000 @@ -335,9 +335,9 @@ .. note:: - ``NotImplementedError`` and ``NotImplemented`` are not interchangeable, + ``NotImplementedError`` and :data:`NotImplemented` are not interchangeable, even though they have similar names and purposes. See - :data:`NotImplemented` for details on when to use it. + :data:`!NotImplemented` for details on when to use it. .. exception:: OSError([arg]) OSError(errno, strerror[, filename[, winerror[, filename2]]]) diff -Nru python3.11-3.11.8/Doc/library/faulthandler.rst python3.11-3.11.9/Doc/library/faulthandler.rst --- python3.11-3.11.8/Doc/library/faulthandler.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/faulthandler.rst 2024-04-02 08:25:04.000000000 +0000 @@ -118,12 +118,12 @@ This function is implemented using a watchdog thread. - .. versionchanged:: 3.7 - This function is now always available. - .. versionchanged:: 3.5 Added support for passing file descriptor to this function. + .. versionchanged:: 3.7 + This function is now always available. + .. function:: cancel_dump_traceback_later() Cancel the last call to :func:`dump_traceback_later`. diff -Nru python3.11-3.11.8/Doc/library/fcntl.rst python3.11-3.11.9/Doc/library/fcntl.rst --- python3.11-3.11.8/Doc/library/fcntl.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/fcntl.rst 2024-04-02 08:25:04.000000000 +0000 @@ -13,10 +13,10 @@ ---------------- -This module performs file control and I/O control on file descriptors. It is an -interface to the :c:func:`fcntl` and :c:func:`ioctl` Unix routines. For a -complete description of these calls, see :manpage:`fcntl(2)` and -:manpage:`ioctl(2)` Unix manual pages. +This module performs file and I/O control on file descriptors. It is an +interface to the :c:func:`fcntl` and :c:func:`ioctl` Unix routines. +See the :manpage:`fcntl(2)` and :manpage:`ioctl(2)` Unix manual pages +for full details. .. availability:: Unix, not Emscripten, not WASI. @@ -74,7 +74,7 @@ most likely to result in a segmentation violation or a more subtle data corruption. - If the :c:func:`fcntl` fails, an :exc:`OSError` is raised. + If the :c:func:`fcntl` call fails, an :exc:`OSError` is raised. .. audit-event:: fcntl.fcntl fd,cmd,arg fcntl.fcntl @@ -112,7 +112,7 @@ buffer 1024 bytes long which is then passed to :func:`ioctl` and copied back into the supplied buffer. - If the :c:func:`ioctl` fails, an :exc:`OSError` exception is raised. + If the :c:func:`ioctl` call fails, an :exc:`OSError` exception is raised. An example:: @@ -137,7 +137,7 @@ :manpage:`flock(2)` for details. (On some systems, this function is emulated using :c:func:`fcntl`.) - If the :c:func:`flock` fails, an :exc:`OSError` exception is raised. + If the :c:func:`flock` call fails, an :exc:`OSError` exception is raised. .. audit-event:: fcntl.flock fd,operation fcntl.flock @@ -149,17 +149,28 @@ method are accepted as well) of the file to lock or unlock, and *cmd* is one of the following values: - * :const:`LOCK_UN` -- unlock - * :const:`LOCK_SH` -- acquire a shared lock - * :const:`LOCK_EX` -- acquire an exclusive lock - - When *cmd* is :const:`LOCK_SH` or :const:`LOCK_EX`, it can also be - bitwise ORed with :const:`LOCK_NB` to avoid blocking on lock acquisition. 
- If :const:`LOCK_NB` is used and the lock cannot be acquired, an + .. data:: LOCK_UN + + Release an existing lock. + + .. data:: LOCK_SH + + Acquire a shared lock. + + .. data:: LOCK_EX + + Acquire an exclusive lock. + + .. data:: LOCK_NB + + Bitwise OR with any of the other three ``LOCK_*`` constants to make + the request non-blocking. + + If :const:`!LOCK_NB` is used and the lock cannot be acquired, an :exc:`OSError` will be raised and the exception will have an *errno* - attribute set to :const:`EACCES` or :const:`EAGAIN` (depending on the + attribute set to :const:`~errno.EACCES` or :const:`~errno.EAGAIN` (depending on the operating system; for portability, check for both values). On at least some - systems, :const:`LOCK_EX` can only be used if the file descriptor refers to a + systems, :const:`!LOCK_EX` can only be used if the file descriptor refers to a file opened for writing. *len* is the number of bytes to lock, *start* is the byte offset at diff -Nru python3.11-3.11.8/Doc/library/ftplib.rst python3.11-3.11.9/Doc/library/ftplib.rst --- python3.11-3.11.8/Doc/library/ftplib.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/ftplib.rst 2024-04-02 08:25:04.000000000 +0000 @@ -104,7 +104,7 @@ :param timeout: A timeout in seconds for blocking operations like :meth:`connect` (default: the global default timeout setting). - :type timeout: int | None + :type timeout: float | None :param source_address: |param_doc_source_address| @@ -178,7 +178,7 @@ :param timeout: A timeout in seconds for the connection attempt (default: the global default timeout setting). - :type timeout: int | None + :type timeout: float | None :param source_address: |param_doc_source_address| @@ -232,8 +232,8 @@ .. method:: FTP.voidcmd(cmd) Send a simple command string to the server and handle the response. Return - nothing if a response code corresponding to success (codes in the range - 200--299) is received. Raise :exc:`error_reply` otherwise. + the response string if the response code corresponds to success (codes in + the range 200--299). Raise :exc:`error_reply` otherwise. .. audit-event:: ftplib.sendcmd self,cmd ftplib.FTP.voidcmd @@ -484,7 +484,7 @@ :param timeout: A timeout in seconds for blocking operations like :meth:`~FTP.connect` (default: the global default timeout setting). - :type timeout: int | None + :type timeout: float | None :param source_address: |param_doc_source_address| diff -Nru python3.11-3.11.8/Doc/library/functions.rst python3.11-3.11.9/Doc/library/functions.rst --- python3.11-3.11.8/Doc/library/functions.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/functions.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1568,6 +1568,16 @@ If :func:`sys.displayhook` is not accessible, this function will raise :exc:`RuntimeError`. + This class has a custom representation that can be evaluated:: + + class Person: + def __init__(self, name, age): + self.name = name + self.age = age + + def __repr__(self): + return f"Person('{self.name}', {self.age})" + .. function:: reversed(seq) diff -Nru python3.11-3.11.8/Doc/library/functools.rst python3.11-3.11.9/Doc/library/functools.rst --- python3.11-3.11.8/Doc/library/functools.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/functools.rst 2024-04-02 08:25:04.000000000 +0000 @@ -650,13 +650,9 @@ on the wrapper function). :exc:`AttributeError` is still raised if the wrapper function itself is missing any attributes named in *updated*. - .. 
versionadded:: 3.2 - Automatic addition of the ``__wrapped__`` attribute. - - .. versionadded:: 3.2 - Copying of the ``__annotations__`` attribute by default. - .. versionchanged:: 3.2 + The ``__wrapped__`` attribute is now automatically added. + The ``__annotations__`` attribute is now copied by default. Missing attributes no longer trigger an :exc:`AttributeError`. .. versionchanged:: 3.4 diff -Nru python3.11-3.11.8/Doc/library/hashlib.rst python3.11-3.11.9/Doc/library/hashlib.rst --- python3.11-3.11.8/Doc/library/hashlib.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/hashlib.rst 2024-04-02 08:25:04.000000000 +0000 @@ -77,8 +77,6 @@ SHA3 (Keccak) and SHAKE constructors :func:`sha3_224`, :func:`sha3_256`, :func:`sha3_384`, :func:`sha3_512`, :func:`shake_128`, :func:`shake_256` were added. - -.. versionadded:: 3.6 :func:`blake2b` and :func:`blake2s` were added. .. _hashlib-usedforsecurity: @@ -116,7 +114,7 @@ Constructors ------------ -.. function:: new(name[, data], \*, usedforsecurity=True) +.. function:: new(name[, data], *, usedforsecurity=True) Is a generic constructor that takes the string *name* of the desired algorithm as its first parameter. It also exists to allow access to the diff -Nru python3.11-3.11.8/Doc/library/http.cookiejar.rst python3.11-3.11.9/Doc/library/http.cookiejar.rst --- python3.11-3.11.8/Doc/library/http.cookiejar.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/http.cookiejar.rst 2024-04-02 08:25:04.000000000 +0000 @@ -649,6 +649,11 @@ :const:`None`. +.. attribute:: Cookie.domain + + Cookie domain (a string). + + .. attribute:: Cookie.path Cookie path (a string, eg. ``'/acme/rocket_launchers'``). diff -Nru python3.11-3.11.8/Doc/library/http.server.rst python3.11-3.11.9/Doc/library/http.server.rst --- python3.11-3.11.8/Doc/library/http.server.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/http.server.rst 2024-04-02 08:25:04.000000000 +0000 @@ -502,6 +502,12 @@ python -m http.server --cgi +.. warning:: + + :class:`CGIHTTPRequestHandler` and the ``--cgi`` command line option + are not intended for use by untrusted clients and may be vulnerable + to exploitation. Always use within a secure environment. + .. _http.server-security: Security Considerations diff -Nru python3.11-3.11.8/Doc/library/idle.rst python3.11-3.11.9/Doc/library/idle.rst --- python3.11-3.11.8/Doc/library/idle.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/idle.rst 2024-04-02 08:25:04.000000000 +0000 @@ -604,7 +604,7 @@ The editing features described in previous subsections work when entering code interactively. IDLE's Shell window also responds to the following: -* :kbd:`C-c` attemps to interrupt statement execution (but may fail). +* :kbd:`C-c` attempts to interrupt statement execution (but may fail). * :kbd:`C-d` closes Shell if typed at a ``>>>`` prompt. 
diff -Nru python3.11-3.11.8/Doc/library/imaplib.rst python3.11-3.11.9/Doc/library/imaplib.rst --- python3.11-3.11.8/Doc/library/imaplib.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/imaplib.rst 2024-04-02 08:25:04.000000000 +0000 @@ -632,7 +632,7 @@ import getpass, imaplib - M = imaplib.IMAP4() + M = imaplib.IMAP4(host='example.org') M.login(getpass.getuser(), getpass.getpass()) M.select() typ, data = M.search(None, 'ALL') diff -Nru python3.11-3.11.8/Doc/library/importlib.metadata.rst python3.11-3.11.9/Doc/library/importlib.metadata.rst --- python3.11-3.11.8/Doc/library/importlib.metadata.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/importlib.metadata.rst 2024-04-02 08:25:04.000000000 +0000 @@ -218,7 +218,6 @@ The ``Description`` is now included in the metadata when presented through the payload. Line continuation characters have been removed. -.. versionadded:: 3.10 The ``json`` attribute was added. diff -Nru python3.11-3.11.8/Doc/library/importlib.rst python3.11-3.11.9/Doc/library/importlib.rst --- python3.11-3.11.8/Doc/library/importlib.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/importlib.rst 2024-04-02 08:25:04.000000000 +0000 @@ -329,7 +329,7 @@ when invalidating the caches of all finders on :data:`sys.meta_path`. .. versionchanged:: 3.4 - Returns ``None`` when called instead of ``NotImplemented``. + Returns ``None`` when called instead of :data:`NotImplemented`. .. class:: PathEntryFinder diff -Nru python3.11-3.11.8/Doc/library/inspect.rst python3.11-3.11.9/Doc/library/inspect.rst --- python3.11-3.11.8/Doc/library/inspect.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/inspect.rst 2024-04-02 08:25:04.000000000 +0000 @@ -634,9 +634,6 @@ Accepts a wide range of Python callables, from plain functions and classes to :func:`functools.partial` objects. - If the passed object has a ``__signature__`` attribute, this function - returns it without further computations. - For objects defined in modules using stringized annotations (``from __future__ import annotations``), :func:`signature` will attempt to automatically un-stringize the annotations using @@ -671,6 +668,13 @@ Python. For example, in CPython, some built-in functions defined in C provide no metadata about their arguments. + .. impl-detail:: + + If the passed object has a :attr:`!__signature__` attribute, + we may use it to create the signature. + The exact semantics are an implementation detail and are subject to + unannounced changes. Consult the source code for current semantics. + .. class:: Signature(parameters=None, *, return_annotation=Signature.empty) diff -Nru python3.11-3.11.8/Doc/library/ipaddress.rst python3.11-3.11.9/Doc/library/ipaddress.rst --- python3.11-3.11.8/Doc/library/ipaddress.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/ipaddress.rst 2024-04-02 08:25:04.000000000 +0000 @@ -121,22 +121,12 @@ Leading zeros are tolerated, even in ambiguous cases that look like octal notation. - .. versionchanged:: 3.10 + .. versionchanged:: 3.9.5 Leading zeros are no longer tolerated and are treated as an error. IPv4 address strings are now parsed as strict as glibc :func:`~socket.inet_pton`. - .. versionchanged:: 3.9.5 - - The above change was also included in Python 3.9 starting with - version 3.9.5. - - .. versionchanged:: 3.8.12 - - The above change was also included in Python 3.8 starting with - version 3.8.12. - .. 
attribute:: version The appropriate version number: ``4`` for IPv4, ``6`` for IPv6. diff -Nru python3.11-3.11.8/Doc/library/json.rst python3.11-3.11.9/Doc/library/json.rst --- python3.11-3.11.8/Doc/library/json.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/json.rst 2024-04-02 08:25:04.000000000 +0000 @@ -95,7 +95,7 @@ ... if isinstance(obj, complex): ... return [obj.real, obj.imag] ... # Let the base class default method raise the TypeError - ... return json.JSONEncoder.default(self, obj) + ... return super().default(obj) ... >>> json.dumps(2 + 1j, cls=ComplexEncoder) '[2.0, 1.0]' @@ -493,7 +493,7 @@ else: return list(iterable) # Let the base class default method raise the TypeError - return json.JSONEncoder.default(self, o) + return super().default(o) .. method:: encode(o) diff -Nru python3.11-3.11.8/Doc/library/logging.rst python3.11-3.11.9/Doc/library/logging.rst --- python3.11-3.11.8/Doc/library/logging.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/logging.rst 2024-04-02 08:25:04.000000000 +0000 @@ -30,13 +30,53 @@ can include your own messages integrated with messages from third-party modules. -The simplest example: +Here's a simple example of idiomatic usage: :: + + # myapp.py + import logging + import mylib + logger = logging.getLogger(__name__) + + def main(): + logging.basicConfig(filename='myapp.log', level=logging.INFO) + logger.info('Started') + mylib.do_something() + logger.info('Finished') + + if __name__ == '__main__': + main() + +:: + + # mylib.py + import logging + logger = logging.getLogger(__name__) + + def do_something(): + logger.info('Doing something') + +If you run *myapp.py*, you should see this in *myapp.log*: .. code-block:: none - >>> import logging - >>> logging.warning('Watch out!') - WARNING:root:Watch out! + INFO:__main__:Started + INFO:mylib:Doing something + INFO:__main__:Finished + +The key features of this idiomatic usage is that the majority of code is simply +creating a module level logger with ``getLogger(__name__)``, and using that +logger to do any needed logging. This is concise while allowing downstream code +fine grained control if needed. Logged messages to the module-level logger get +forwarded up to handlers of loggers in higher-level modules, all the way up to +the root logger; for this reason this approach is known as hierarchical logging. + +For logging to be useful, it needs to be configured: setting the levels and +destinations for each logger, potentially changing how specific modules log, +often based on command-line arguments or application configuration. In most +cases, like the one above, only the root logger needs to be so configured, since +all the lower level loggers at module level eventually forward their messages to +its handlers. :func:`~logging.basicConfig` provides a quick way to configure +the root logger that handles many use cases. The module provides a lot of functionality and flexibility. If you are unfamiliar with logging, the best way to get to grips with it is to view the @@ -77,6 +117,27 @@ .. class:: Logger + .. attribute:: Logger.name + + This is the logger's name, and is the value that was passed to :func:`getLogger` + to obtain the logger. + + .. note:: This attribute should be treated as read-only. + + .. attribute:: Logger.level + + The threshold of this logger, as set by the :meth:`setLevel` method. + + .. note:: Do not set this attribute directly - always use :meth:`setLevel`, + which has checks for the level passed to it. + + .. 
attribute:: Logger.parent + + The parent logger of this logger. It may change based on later instantiation + of loggers which are higher up in the namespace hierarchy. + + .. note:: This value should be treated as read-only. + .. attribute:: Logger.propagate If this attribute evaluates to true, events logged to this logger will be @@ -108,6 +169,21 @@ scenario is to attach handlers only to the root logger, and to let propagation take care of the rest. + .. attribute:: Logger.handlers + + The list of handlers directly attached to this logger instance. + + .. note:: This attribute should be treated as read-only; it is normally changed via + the :meth:`addHandler` and :meth:`removeHandler` methods, which use locks to ensure + thread-safe operation. + + .. attribute:: Logger.disabled + + This attribute disables handling of any events. It is set to ``False`` in the + initializer, and only changed by logging configuration code. + + .. note:: This attribute should be treated as read-only. + .. method:: Logger.setLevel(level) Sets the threshold for this logger to *level*. Logging messages which are less @@ -1081,89 +1157,31 @@ .. function:: debug(msg, *args, **kwargs) - Logs a message with level :const:`DEBUG` on the root logger. The *msg* is the - message format string, and the *args* are the arguments which are merged into - *msg* using the string formatting operator. (Note that this means that you can - use keywords in the format string, together with a single dictionary argument.) - - There are three keyword arguments in *kwargs* which are inspected: *exc_info* - which, if it does not evaluate as false, causes exception information to be - added to the logging message. If an exception tuple (in the format returned by - :func:`sys.exc_info`) or an exception instance is provided, it is used; - otherwise, :func:`sys.exc_info` is called to get the exception information. - - The second optional keyword argument is *stack_info*, which defaults to - ``False``. If true, stack information is added to the logging - message, including the actual logging call. Note that this is not the same - stack information as that displayed through specifying *exc_info*: The - former is stack frames from the bottom of the stack up to the logging call - in the current thread, whereas the latter is information about stack frames - which have been unwound, following an exception, while searching for - exception handlers. - - You can specify *stack_info* independently of *exc_info*, e.g. to just show - how you got to a certain point in your code, even when no exceptions were - raised. The stack frames are printed following a header line which says: - - .. code-block:: none - - Stack (most recent call last): - - This mimics the ``Traceback (most recent call last):`` which is used when - displaying exception frames. - - The third optional keyword argument is *extra* which can be used to pass a - dictionary which is used to populate the __dict__ of the LogRecord created for - the logging event with user-defined attributes. These custom attributes can then - be used as you like. For example, they could be incorporated into logged - messages. For example:: - - FORMAT = '%(asctime)s %(clientip)-15s %(user)-8s %(message)s' - logging.basicConfig(format=FORMAT) - d = {'clientip': '192.168.0.1', 'user': 'fbloggs'} - logging.warning('Protocol problem: %s', 'connection reset', extra=d) - - would print something like: - - .. 
code-block:: none - - 2006-02-08 22:20:02,165 192.168.0.1 fbloggs Protocol problem: connection reset - - The keys in the dictionary passed in *extra* should not clash with the keys used - by the logging system. (See the :class:`Formatter` documentation for more - information on which keys are used by the logging system.) - - If you choose to use these attributes in logged messages, you need to exercise - some care. In the above example, for instance, the :class:`Formatter` has been - set up with a format string which expects 'clientip' and 'user' in the attribute - dictionary of the LogRecord. If these are missing, the message will not be - logged because a string formatting exception will occur. So in this case, you - always need to pass the *extra* dictionary with these keys. - - While this might be annoying, this feature is intended for use in specialized - circumstances, such as multi-threaded servers where the same code executes in - many contexts, and interesting conditions which arise are dependent on this - context (such as remote client IP address and authenticated user name, in the - above example). In such circumstances, it is likely that specialized - :class:`Formatter`\ s would be used with particular :class:`Handler`\ s. - - This function (as well as :func:`info`, :func:`warning`, :func:`error` and - :func:`critical`) will call :func:`basicConfig` if the root logger doesn't - have any handler attached. + This is a convenience function that calls :meth:`Logger.debug`, on the root + logger. The handling of the arguments is in every way identical + to what is described in that method. + + The only difference is that if the root logger has no handlers, then + :func:`basicConfig` is called, prior to calling ``debug`` on the root logger. + + For very short scripts or quick demonstrations of ``logging`` facilities, + ``debug`` and the other module-level functions may be convenient. However, + most programs will want to carefully and explicitly control the logging + configuration, and should therefore prefer creating a module-level logger and + calling :meth:`Logger.debug` (or other level-specific methods) on it, as + described at the beginnning of this documentation. - .. versionchanged:: 3.2 - The *stack_info* parameter was added. .. function:: info(msg, *args, **kwargs) - Logs a message with level :const:`INFO` on the root logger. The arguments are - interpreted as for :func:`debug`. + Logs a message with level :const:`INFO` on the root logger. The arguments and behavior + are otherwise the same as for :func:`debug`. .. function:: warning(msg, *args, **kwargs) - Logs a message with level :const:`WARNING` on the root logger. The arguments - are interpreted as for :func:`debug`. + Logs a message with level :const:`WARNING` on the root logger. The arguments and behavior + are otherwise the same as for :func:`debug`. .. note:: There is an obsolete function ``warn`` which is functionally identical to ``warning``. As ``warn`` is deprecated, please do not use @@ -1172,26 +1190,26 @@ .. function:: error(msg, *args, **kwargs) - Logs a message with level :const:`ERROR` on the root logger. The arguments are - interpreted as for :func:`debug`. + Logs a message with level :const:`ERROR` on the root logger. The arguments and behavior + are otherwise the same as for :func:`debug`. .. function:: critical(msg, *args, **kwargs) - Logs a message with level :const:`CRITICAL` on the root logger. The arguments - are interpreted as for :func:`debug`. 
+ Logs a message with level :const:`CRITICAL` on the root logger. The arguments and behavior + are otherwise the same as for :func:`debug`. .. function:: exception(msg, *args, **kwargs) - Logs a message with level :const:`ERROR` on the root logger. The arguments are - interpreted as for :func:`debug`. Exception info is added to the logging + Logs a message with level :const:`ERROR` on the root logger. The arguments and behavior + are otherwise the same as for :func:`debug`. Exception info is added to the logging message. This function should only be called from an exception handler. .. function:: log(level, msg, *args, **kwargs) - Logs a message with level *level* on the root logger. The other arguments are - interpreted as for :func:`debug`. + Logs a message with level *level* on the root logger. The arguments and behavior + are otherwise the same as for :func:`debug`. .. function:: disable(level=CRITICAL) diff -Nru python3.11-3.11.8/Doc/library/math.rst python3.11-3.11.9/Doc/library/math.rst --- python3.11-3.11.8/Doc/library/math.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/math.rst 2024-04-02 08:25:04.000000000 +0000 @@ -578,7 +578,7 @@ The :func:`erf` function can be used to compute traditional statistical functions such as the `cumulative standard normal distribution - `_:: + `_:: def phi(x): 'Cumulative distribution function for the standard normal distribution' @@ -666,11 +666,11 @@ >>> math.isnan(float('nan')) True + .. versionadded:: 3.5 + .. versionchanged:: 3.11 It is now always available. - .. versionadded:: 3.5 - .. impl-detail:: diff -Nru python3.11-3.11.8/Doc/library/msvcrt.rst python3.11-3.11.9/Doc/library/msvcrt.rst --- python3.11-3.11.8/Doc/library/msvcrt.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/msvcrt.rst 2024-04-02 08:25:04.000000000 +0000 @@ -157,4 +157,19 @@ .. function:: heapmin() Force the :c:func:`malloc` heap to clean itself up and return unused blocks to - the operating system. On failure, this raises :exc:`OSError`. + the operating system. On failure, this raises :exc:`OSError`. + + +.. data:: CRT_ASSEMBLY_VERSION + + The CRT Assembly version, from the :file:`crtassem.h` header file. + + +.. data:: VC_ASSEMBLY_PUBLICKEYTOKEN + + The VC Assembly public key token, from the :file:`crtassem.h` header file. + + +.. data:: LIBRARIES_ASSEMBLY_NAME_PREFIX + + The Libraries Assembly name prefix, from the :file:`crtassem.h` header file. diff -Nru python3.11-3.11.8/Doc/library/multiprocessing.rst python3.11-3.11.9/Doc/library/multiprocessing.rst --- python3.11-3.11.8/Doc/library/multiprocessing.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/multiprocessing.rst 2024-04-02 08:25:04.000000000 +0000 @@ -137,19 +137,19 @@ Available on Unix platforms which support passing file descriptors over Unix pipes. -.. versionchanged:: 3.8 - - On macOS, the *spawn* start method is now the default. The *fork* start - method should be considered unsafe as it can lead to crashes of the - subprocess. See :issue:`33725`. - .. versionchanged:: 3.4 *spawn* added on all Unix platforms, and *forkserver* added for some Unix platforms. Child processes no longer inherit all of the parents inheritable handles on Windows. -On Unix using the *spawn* or *forkserver* start methods will also +.. versionchanged:: 3.8 + + On macOS, the *spawn* start method is now the default. The *fork* start + method should be considered unsafe as it can lead to crashes of the + subprocess as macOS system libraries may start threads. 
See :issue:`33725`. + +On POSIX using the *spawn* or *forkserver* start methods will also start a *resource tracker* process which tracks the unlinked named system resources (such as named semaphores or :class:`~multiprocessing.shared_memory.SharedMemory` objects) created @@ -506,7 +506,7 @@ to the process. .. versionchanged:: 3.3 - Added the *daemon* argument. + Added the *daemon* parameter. .. method:: run() @@ -636,8 +636,8 @@ .. method:: terminate() - Terminate the process. On Unix this is done using the ``SIGTERM`` signal; - on Windows :c:func:`TerminateProcess` is used. Note that exit handlers and + Terminate the process. On POSIX this is done using the :py:const:`~signal.SIGTERM` signal; + on Windows :c:func:`!TerminateProcess` is used. Note that exit handlers and finally clauses, etc., will not be executed. Note that descendant processes of the process will *not* be terminated -- @@ -1065,13 +1065,13 @@ or ``None``. ``'fork'`` is the default on Unix, while ``'spawn'`` is the default on Windows and macOS. -.. versionchanged:: 3.8 + .. versionadded:: 3.4 - On macOS, the *spawn* start method is now the default. The *fork* start - method should be considered unsafe as it can lead to crashes of the - subprocess. See :issue:`33725`. + .. versionchanged:: 3.8 - .. versionadded:: 3.4 + On macOS, the *spawn* start method is now the default. The *fork* start + method should be considered unsafe as it can lead to crashes of the + subprocess. See :issue:`33725`. .. function:: set_executable(executable) @@ -1208,8 +1208,7 @@ Connection objects themselves can now be transferred between processes using :meth:`Connection.send` and :meth:`Connection.recv`. - .. versionadded:: 3.3 - Connection objects now support the context management protocol -- see + Connection objects also now support the context management protocol -- see :ref:`typecontextmanager`. :meth:`~contextmanager.__enter__` returns the connection object, and :meth:`~contextmanager.__exit__` calls :meth:`close`. @@ -2212,11 +2211,11 @@ as CPython does not assure that the finalizer of the pool will be called (see :meth:`object.__del__` for more information). - .. versionadded:: 3.2 - *maxtasksperchild* + .. versionchanged:: 3.2 + Added the *maxtasksperchild* parameter. - .. versionadded:: 3.4 - *context* + .. versionchanged:: 3.4 + Added the *context* parameter. .. note:: @@ -2338,7 +2337,7 @@ Wait for the worker processes to exit. One must call :meth:`close` or :meth:`terminate` before using :meth:`join`. - .. versionadded:: 3.3 + .. versionchanged:: 3.3 Pool objects now support the context management protocol -- see :ref:`typecontextmanager`. :meth:`~contextmanager.__enter__` returns the pool object, and :meth:`~contextmanager.__exit__` calls :meth:`terminate`. @@ -2507,7 +2506,7 @@ The address from which the last accepted connection came. If this is unavailable then it is ``None``. - .. versionadded:: 3.3 + .. versionchanged:: 3.3 Listener objects now support the context management protocol -- see :ref:`typecontextmanager`. :meth:`~contextmanager.__enter__` returns the listener object, and :meth:`~contextmanager.__exit__` calls :meth:`close`. @@ -2938,7 +2937,7 @@ The *spawn* and *forkserver* start methods ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -There are a few extra restriction which don't apply to the *fork* +There are a few extra restrictions which don't apply to the *fork* start method. 
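As a hedged illustration of the kind of code these restrictions lead to (the module layout and function name are assumptions, not part of the library), a script using the *spawn* start method typically keeps its entry point under a ``__main__`` guard so the module can be safely re-imported in the child process::

    import multiprocessing as mp

    def work(n):
        print(n * n)

    if __name__ == '__main__':
        ctx = mp.get_context('spawn')            # pick the start method explicitly
        p = ctx.Process(target=work, args=(3,))
        p.start()
        p.join()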
More picklability diff -Nru python3.11-3.11.8/Doc/library/numbers.rst python3.11-3.11.9/Doc/library/numbers.rst --- python3.11-3.11.8/Doc/library/numbers.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/numbers.rst 2024-04-02 08:25:04.000000000 +0000 @@ -166,7 +166,7 @@ 2. If ``A`` falls back to the boilerplate code, and it were to return a value from :meth:`~object.__add__`, we'd miss the possibility that ``B`` defines a more intelligent :meth:`~object.__radd__`, so the - boilerplate should return :const:`NotImplemented` from + boilerplate should return :data:`NotImplemented` from :meth:`!__add__`. (Or ``A`` may not implement :meth:`!__add__` at all.) 3. Then ``B``'s :meth:`~object.__radd__` gets a chance. If it accepts diff -Nru python3.11-3.11.8/Doc/library/os.rst python3.11-3.11.9/Doc/library/os.rst --- python3.11-3.11.8/Doc/library/os.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/os.rst 2024-04-02 08:25:04.000000000 +0000 @@ -2366,7 +2366,6 @@ .. versionchanged:: 3.8 Accepts a :term:`path-like object` and a bytes object on Windows. - .. versionchanged:: 3.8 Added support for directory junctions, and changed to return the substitution path (which typically includes ``\\?\`` prefix) rather than the optional "print name" field that was previously returned. @@ -2848,14 +2847,14 @@ Time of most recent access expressed in nanoseconds as an integer. - .. versionadded: 3.3 + .. versionadded:: 3.3 .. attribute:: st_mtime_ns Time of most recent content modification expressed in nanoseconds as an integer. - .. versionadded: 3.3 + .. versionadded:: 3.3 .. attribute:: st_ctime_ns @@ -2865,7 +2864,7 @@ * the time of creation on Windows, expressed in nanoseconds as an integer. - .. versionadded: 3.3 + .. versionadded:: 3.3 .. note:: @@ -2975,10 +2974,10 @@ Windows now returns the file index as :attr:`st_ino` when available. - .. versionadded:: 3.7 + .. versionchanged:: 3.7 Added the :attr:`st_fstype` member to Solaris/derivatives. - .. versionadded:: 3.8 + .. versionchanged:: 3.8 Added the :attr:`st_reparse_tag` member on Windows. .. versionchanged:: 3.8 @@ -5121,20 +5120,20 @@ easy-to-use interface to the random number generator provided by your platform, please see :class:`random.SystemRandom`. - .. versionchanged:: 3.6.0 - On Linux, ``getrandom()`` is now used in blocking mode to increase the - security. - - .. versionchanged:: 3.5.2 - On Linux, if the ``getrandom()`` syscall blocks (the urandom entropy pool - is not initialized yet), fall back on reading ``/dev/urandom``. - .. versionchanged:: 3.5 On Linux 3.17 and newer, the ``getrandom()`` syscall is now used when available. On OpenBSD 5.6 and newer, the C ``getentropy()`` function is now used. These functions avoid the usage of an internal file descriptor. + .. versionchanged:: 3.5.2 + On Linux, if the ``getrandom()`` syscall blocks (the urandom entropy pool + is not initialized yet), fall back on reading ``/dev/urandom``. + + .. versionchanged:: 3.6 + On Linux, ``getrandom()`` is now used in blocking mode to increase the + security. + .. versionchanged:: 3.11 On Windows, ``BCryptGenRandom()`` is used instead of ``CryptGenRandom()`` which is deprecated. 
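A minimal usage sketch of :func:`os.urandom` (the 16-byte length is only an example)::

    import os

    token = os.urandom(16)    # 16 cryptographically secure random bytes
    print(token.hex())        # hex-encode, e.g. for an opaque session identifier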
diff -Nru python3.11-3.11.8/Doc/library/pdb.rst python3.11-3.11.9/Doc/library/pdb.rst --- python3.11-3.11.8/Doc/library/pdb.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/pdb.rst 2024-04-02 08:25:04.000000000 +0000 @@ -269,19 +269,20 @@ If a file :file:`.pdbrc` exists in the user's home directory or in the current directory, it is read with ``'utf-8'`` encoding and executed as if it had been -typed at the debugger prompt. This is particularly useful for aliases. If both +typed at the debugger prompt, with the exception that empty lines and lines +starting with ``#`` are ignored. This is particularly useful for aliases. If both files exist, the one in the home directory is read first and aliases defined there can be overridden by the local file. -.. versionchanged:: 3.11 - :file:`.pdbrc` is now read with ``'utf-8'`` encoding. Previously, it was read - with the system locale encoding. - .. versionchanged:: 3.2 :file:`.pdbrc` can now contain commands that continue debugging, such as :pdbcmd:`continue` or :pdbcmd:`next`. Previously, these commands had no effect. +.. versionchanged:: 3.11 + :file:`.pdbrc` is now read with ``'utf-8'`` encoding. Previously, it was read + with the system locale encoding. + .. pdbcommand:: h(elp) [command] diff -Nru python3.11-3.11.8/Doc/library/pickle.rst python3.11-3.11.9/Doc/library/pickle.rst --- python3.11-3.11.8/Doc/library/pickle.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/pickle.rst 2024-04-02 08:25:04.000000000 +0000 @@ -373,7 +373,7 @@ Special reducer that can be defined in :class:`Pickler` subclasses. This method has priority over any reducer in the :attr:`dispatch_table`. It should conform to the same interface as a :meth:`~object.__reduce__` method, and - can optionally return ``NotImplemented`` to fallback on + can optionally return :data:`NotImplemented` to fallback on :attr:`dispatch_table`-registered reducers to pickle ``obj``. For a detailed example, see :ref:`reducer_override`. @@ -495,7 +495,7 @@ The following types can be pickled: * built-in constants (``None``, ``True``, ``False``, ``Ellipsis``, and - ``NotImplemented``); + :data:`NotImplemented`); * integers, floating-point numbers, complex numbers; @@ -645,8 +645,8 @@ .. note:: - If :meth:`__getstate__` returns a false value, the :meth:`__setstate__` - method will not be called upon unpickling. + If :meth:`__reduce__` returns a state with value ``None`` at pickling, + the :meth:`__setstate__` method will not be called upon unpickling. Refer to the section :ref:`pickle-state` for more information about how to use @@ -897,7 +897,7 @@ For those cases, it is possible to subclass from the :class:`Pickler` class and implement a :meth:`~Pickler.reducer_override` method. This method can return an arbitrary reduction tuple (see :meth:`~object.__reduce__`). It can alternatively return -``NotImplemented`` to fallback to the traditional behavior. +:data:`NotImplemented` to fallback to the traditional behavior. If both the :attr:`~Pickler.dispatch_table` and :meth:`~Pickler.reducer_override` are defined, then diff -Nru python3.11-3.11.8/Doc/library/pprint.rst python3.11-3.11.9/Doc/library/pprint.rst --- python3.11-3.11.8/Doc/library/pprint.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/pprint.rst 2024-04-02 08:25:04.000000000 +0000 @@ -31,7 +31,93 @@ .. versionchanged:: 3.10 Added support for pretty-printing :class:`dataclasses.dataclass`. -The :mod:`pprint` module defines one class: +.. 
_pprint-functions: + +Functions +--------- + +.. function:: pp(object, *args, sort_dicts=False, **kwargs) + + Prints the formatted representation of *object* followed by a newline. + If *sort_dicts* is false (the default), dictionaries will be displayed with + their keys in insertion order, otherwise the dict keys will be sorted. + *args* and *kwargs* will be passed to :func:`~pprint.pprint` as formatting + parameters. + + >>> import pprint + >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] + >>> stuff.insert(0, stuff) + >>> pprint.pp(stuff) + [, + 'spam', + 'eggs', + 'lumberjack', + 'knights', + 'ni'] + + .. versionadded:: 3.8 + + +.. function:: pprint(object, stream=None, indent=1, width=80, depth=None, *, \ + compact=False, sort_dicts=True, underscore_numbers=False) + + Prints the formatted representation of *object* on *stream*, followed by a + newline. If *stream* is ``None``, :data:`sys.stdout` is used. This may be used + in the interactive interpreter instead of the :func:`print` function for + inspecting values (you can even reassign ``print = pprint.pprint`` for use + within a scope). + + The configuration parameters *stream*, *indent*, *width*, *depth*, + *compact*, *sort_dicts* and *underscore_numbers* are passed to the + :class:`PrettyPrinter` constructor and their meanings are as + described in its documentation below. + + Note that *sort_dicts* is ``True`` by default and you might want to use + :func:`~pprint.pp` instead where it is ``False`` by default. + +.. function:: pformat(object, indent=1, width=80, depth=None, *, \ + compact=False, sort_dicts=True, underscore_numbers=False) + + Return the formatted representation of *object* as a string. *indent*, + *width*, *depth*, *compact*, *sort_dicts* and *underscore_numbers* are + passed to the :class:`PrettyPrinter` constructor as formatting parameters + and their meanings are as described in its documentation below. + + +.. function:: isreadable(object) + + .. index:: pair: built-in function; eval + + Determine if the formatted representation of *object* is "readable", or can be + used to reconstruct the value using :func:`eval`. This always returns ``False`` + for recursive objects. + + >>> pprint.isreadable(stuff) + False + + +.. function:: isrecursive(object) + + Determine if *object* requires a recursive representation. + + +.. function:: saferepr(object) + + Return a string representation of *object*, protected against recursive data + structures. If the representation of *object* exposes a recursive entry, the + recursive reference will be represented as ````. The representation is not otherwise formatted. + + >>> pprint.saferepr(stuff) + "[, 'spam', 'eggs', 'lumberjack', 'knights', 'ni']" + + +.. _prettyprinter-objects: + +PrettyPrinter Objects +--------------------- + +This module defines one class: .. First the implementation class: @@ -44,9 +130,9 @@ Construct a :class:`PrettyPrinter` instance. This constructor understands several keyword parameters. - *stream* (default ``sys.stdout``) is a :term:`file-like object` to + *stream* (default :data:`!sys.stdout`) is a :term:`file-like object` to which the output will be written by calling its :meth:`!write` method. - If both *stream* and ``sys.stdout`` are ``None``, then + If both *stream* and :data:`!sys.stdout` are ``None``, then :meth:`~PrettyPrinter.pprint` silently returns. Other values configure the manner in which nesting of complex data @@ -87,7 +173,7 @@ Added the *underscore_numbers* parameter. .. 
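Since the functions documented above differ mainly in their *sort_dicts*
default, a quick sketch of the visible difference::

   import pprint

   d = {"banana": 3, "apple": 1}
   pprint.pp(d)                         # {'banana': 3, 'apple': 1}
   pprint.pprint(d)                     # {'apple': 1, 'banana': 3}
   pprint.pprint(d, sort_dicts=False)   # same output as pp()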
versionchanged:: 3.11 - No longer attempts to write to ``sys.stdout`` if it is ``None``. + No longer attempts to write to :data:`!sys.stdout` if it is ``None``. >>> import pprint >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] @@ -112,85 +198,6 @@ >>> pp.pprint(tup) ('spam', ('eggs', ('lumberjack', ('knights', ('ni', ('dead', (...))))))) -.. function:: pformat(object, indent=1, width=80, depth=None, *, \ - compact=False, sort_dicts=True, underscore_numbers=False) - - Return the formatted representation of *object* as a string. *indent*, - *width*, *depth*, *compact*, *sort_dicts* and *underscore_numbers* are - passed to the :class:`PrettyPrinter` constructor as formatting parameters - and their meanings are as described in its documentation above. - - -.. function:: pp(object, *args, sort_dicts=False, **kwargs) - - Prints the formatted representation of *object* followed by a newline. - If *sort_dicts* is false (the default), dictionaries will be displayed with - their keys in insertion order, otherwise the dict keys will be sorted. - *args* and *kwargs* will be passed to :func:`pprint` as formatting - parameters. - - .. versionadded:: 3.8 - - -.. function:: pprint(object, stream=None, indent=1, width=80, depth=None, *, \ - compact=False, sort_dicts=True, underscore_numbers=False) - - Prints the formatted representation of *object* on *stream*, followed by a - newline. If *stream* is ``None``, ``sys.stdout`` is used. This may be used - in the interactive interpreter instead of the :func:`print` function for - inspecting values (you can even reassign ``print = pprint.pprint`` for use - within a scope). - - The configuration parameters *stream*, *indent*, *width*, *depth*, - *compact*, *sort_dicts* and *underscore_numbers* are passed to the - :class:`PrettyPrinter` constructor and their meanings are as - described in its documentation above. - - >>> import pprint - >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] - >>> stuff.insert(0, stuff) - >>> pprint.pprint(stuff) - [, - 'spam', - 'eggs', - 'lumberjack', - 'knights', - 'ni'] - -.. function:: isreadable(object) - - .. index:: pair: built-in function; eval - - Determine if the formatted representation of *object* is "readable", or can be - used to reconstruct the value using :func:`eval`. This always returns ``False`` - for recursive objects. - - >>> pprint.isreadable(stuff) - False - - -.. function:: isrecursive(object) - - Determine if *object* requires a recursive representation. - - -One more support function is also defined: - -.. function:: saferepr(object) - - Return a string representation of *object*, protected against recursive data - structures. If the representation of *object* exposes a recursive entry, the - recursive reference will be represented as ````. The representation is not otherwise formatted. - - >>> pprint.saferepr(stuff) - "[, 'spam', 'eggs', 'lumberjack', 'knights', 'ni']" - - -.. _prettyprinter-objects: - -PrettyPrinter Objects ---------------------- :class:`PrettyPrinter` instances have the following methods: @@ -254,7 +261,7 @@ Example ------- -To demonstrate several uses of the :func:`pprint` function and its parameters, +To demonstrate several uses of the :func:`~pprint.pp` function and its parameters, let's fetch information about a project from `PyPI `_:: >>> import json @@ -263,9 +270,9 @@ >>> with urlopen('https://pypi.org/pypi/sampleproject/json') as resp: ... 
project_info = json.load(resp)['info'] -In its basic form, :func:`pprint` shows the whole object:: +In its basic form, :func:`~pprint.pp` shows the whole object:: - >>> pprint.pprint(project_info) + >>> pprint.pp(project_info) {'author': 'The Python Packaging Authority', 'author_email': 'pypa-dev@googlegroups.com', 'bugtrack_url': None, @@ -322,7 +329,7 @@ The result can be limited to a certain *depth* (ellipsis is used for deeper contents):: - >>> pprint.pprint(project_info, depth=1) + >>> pprint.pp(project_info, depth=1) {'author': 'The Python Packaging Authority', 'author_email': 'pypa-dev@googlegroups.com', 'bugtrack_url': None, @@ -368,7 +375,7 @@ Additionally, maximum character *width* can be suggested. If a long object cannot be split, the specified width will be exceeded:: - >>> pprint.pprint(project_info, depth=1, width=60) + >>> pprint.pp(project_info, depth=1, width=60) {'author': 'The Python Packaging Authority', 'author_email': 'pypa-dev@googlegroups.com', 'bugtrack_url': None, diff -Nru python3.11-3.11.8/Doc/library/pydoc.rst python3.11-3.11.9/Doc/library/pydoc.rst --- python3.11-3.11.8/Doc/library/pydoc.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/pydoc.rst 2024-04-02 08:25:04.000000000 +0000 @@ -16,19 +16,19 @@ -------------- -The :mod:`pydoc` module automatically generates documentation from Python +The :mod:`!pydoc` module automatically generates documentation from Python modules. The documentation can be presented as pages of text on the console, served to a web browser, or saved to HTML files. For modules, classes, functions and methods, the displayed documentation is -derived from the docstring (i.e. the :attr:`__doc__` attribute) of the object, +derived from the docstring (i.e. the :attr:`!__doc__` attribute) of the object, and recursively of its documentable members. If there is no docstring, -:mod:`pydoc` tries to obtain a description from the block of comment lines just +:mod:`!pydoc` tries to obtain a description from the block of comment lines just above the definition of the class, function or method in the source file, or at the top of the module (see :func:`inspect.getcomments`). The built-in function :func:`help` invokes the online help system in the -interactive interpreter, which uses :mod:`pydoc` to generate its documentation +interactive interpreter, which uses :mod:`!pydoc` to generate its documentation as text on the console. The same text documentation can also be viewed from outside the Python interpreter by running :program:`pydoc` as a script at the operating system's command prompt. For example, running :: @@ -46,7 +46,7 @@ .. note:: - In order to find objects and their documentation, :mod:`pydoc` imports the + In order to find objects and their documentation, :mod:`!pydoc` imports the module(s) to be documented. Therefore, any code on module level will be executed on that occasion. Use an ``if __name__ == '__main__':`` guard to only execute code when a file is invoked as a script and not just imported. @@ -90,7 +90,7 @@ Module docs for core modules are assumed to reside in ``https://docs.python.org/X.Y/library/`` where ``X`` and ``Y`` are the major and minor version numbers of the Python interpreter. This can -be overridden by setting the :envvar:`PYTHONDOCS` environment variable +be overridden by setting the :envvar:`!PYTHONDOCS` environment variable to a different URL or to a local directory containing the Library Reference Manual pages. @@ -101,7 +101,7 @@ The ``-g`` command line option was removed. .. 
versionchanged:: 3.4 - :mod:`pydoc` now uses :func:`inspect.signature` rather than + :mod:`!pydoc` now uses :func:`inspect.signature` rather than :func:`inspect.getfullargspec` to extract signature information from callables. diff -Nru python3.11-3.11.8/Doc/library/pyexpat.rst python3.11-3.11.9/Doc/library/pyexpat.rst --- python3.11-3.11.8/Doc/library/pyexpat.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/pyexpat.rst 2024-04-02 08:25:04.000000000 +0000 @@ -196,6 +196,42 @@ :exc:`ExpatError` to be raised with the :attr:`code` attribute set to ``errors.codes[errors.XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING]``. +.. method:: xmlparser.SetReparseDeferralEnabled(enabled) + + .. warning:: + + Calling ``SetReparseDeferralEnabled(False)`` has security implications, + as detailed below; please make sure to understand these consequences + prior to using the ``SetReparseDeferralEnabled`` method. + + Expat 2.6.0 introduced a security mechanism called "reparse deferral" + where instead of causing denial of service through quadratic runtime + from reparsing large tokens, reparsing of unfinished tokens is now delayed + by default until a sufficient amount of input is reached. + Due to this delay, registered handlers may — depending of the sizing of + input chunks pushed to Expat — no longer be called right after pushing new + input to the parser. Where immediate feedback and taking over responsiblity + of protecting against denial of service from large tokens are both wanted, + calling ``SetReparseDeferralEnabled(False)`` disables reparse deferral + for the current Expat parser instance, temporarily or altogether. + Calling ``SetReparseDeferralEnabled(True)`` allows re-enabling reparse + deferral. + + Note that :meth:`SetReparseDeferralEnabled` has been backported to some + prior releases of CPython as a security fix. Check for availability of + :meth:`SetReparseDeferralEnabled` using :func:`hasattr` if used in code + running across a variety of Python versions. + + .. versionadded:: 3.11.9 + +.. method:: xmlparser.GetReparseDeferralEnabled() + + Returns whether reparse deferral is currently enabled for the given + Expat parser instance. + + .. versionadded:: 3.11.9 + + :class:`xmlparser` objects have the following attributes: @@ -214,7 +250,8 @@ :meth:`CharacterDataHandler` callback whenever possible. This can improve performance substantially since Expat normally breaks character data into chunks at every line ending. This attribute is false by default, and may be changed at - any time. + any time. Note that when it is false, data that does not contain newlines + may be chunked too. .. attribute:: xmlparser.buffer_used @@ -372,7 +409,10 @@ marked content, and ignorable whitespace. Applications which must distinguish these cases can use the :attr:`StartCdataSectionHandler`, :attr:`EndCdataSectionHandler`, and :attr:`ElementDeclHandler` callbacks to - collect the required information. + collect the required information. Note that the character data may be + chunked even if it is short and so you may receive more than one call to + :meth:`CharacterDataHandler`. Set the :attr:`buffer_text` instance attribute + to ``True`` to avoid that. .. 
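A minimal sketch of the :func:`hasattr` guard recommended above, assuming the
application is prepared to take over protection against large-token denial of
service itself::

   from xml.parsers import expat

   parser = expat.ParserCreate()

   # The method only exists with Expat >= 2.6.0 and on CPython releases
   # that received the backported security fix.
   if hasattr(parser, "SetReparseDeferralEnabled"):
       parser.SetReparseDeferralEnabled(False)
       print(parser.GetReparseDeferralEnabled())   # -> False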
method:: xmlparser.UnparsedEntityDeclHandler(entityName, base, systemId, publicId, notationName) diff -Nru python3.11-3.11.8/Doc/library/random.rst python3.11-3.11.9/Doc/library/random.rst --- python3.11-3.11.8/Doc/library/random.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/random.rst 2024-04-02 08:25:04.000000000 +0000 @@ -280,7 +280,8 @@ ``a <= b`` and ``b <= N <= a`` for ``b < a``. The end-point value ``b`` may or may not be included in the range - depending on floating-point rounding in the equation ``a + (b-a) * random()``. + depending on floating-point rounding in the expression + ``a + (b-a) * random()``. .. function:: triangular(low, high, mode) diff -Nru python3.11-3.11.8/Doc/library/re.rst python3.11-3.11.9/Doc/library/re.rst --- python3.11-3.11.8/Doc/library/re.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/re.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1336,7 +1336,8 @@ Escapes such as ``\n`` are converted to the appropriate characters, and numeric backreferences (``\1``, ``\2``) and named backreferences (``\g<1>``, ``\g``) are replaced by the contents of the - corresponding group. + corresponding group. The backreference ``\g<0>`` will be + replaced by the entire match. .. versionchanged:: 3.5 Unmatched groups are replaced with an empty string. @@ -1589,7 +1590,7 @@ Simulating scanf() ^^^^^^^^^^^^^^^^^^ -.. index:: single: scanf() +.. index:: single: scanf (C function) Python does not currently have an equivalent to :c:func:`!scanf`. Regular expressions are generally more powerful, though also more verbose, than diff -Nru python3.11-3.11.8/Doc/library/resource.rst python3.11-3.11.9/Doc/library/resource.rst --- python3.11-3.11.8/Doc/library/resource.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/resource.rst 2024-04-02 08:25:04.000000000 +0000 @@ -177,6 +177,8 @@ The largest area of mapped memory which the process may occupy. + .. availability:: FreeBSD >= 11. + .. data:: RLIMIT_AS diff -Nru python3.11-3.11.8/Doc/library/sched.rst python3.11-3.11.9/Doc/library/sched.rst --- python3.11-3.11.8/Doc/library/sched.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/sched.rst 2024-04-02 08:25:04.000000000 +0000 @@ -36,7 +36,7 @@ Example:: >>> import sched, time - >>> s = sched.scheduler(time.monotonic, time.sleep) + >>> s = sched.scheduler(time.time, time.sleep) >>> def print_time(a='default'): ... print("From print_time", time.time(), a) ... diff -Nru python3.11-3.11.8/Doc/library/shutil.rst python3.11-3.11.9/Doc/library/shutil.rst --- python3.11-3.11.8/Doc/library/shutil.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/shutil.rst 2024-04-02 08:25:04.000000000 +0000 @@ -39,7 +39,7 @@ .. function:: copyfileobj(fsrc, fdst[, length]) - Copy the contents of the file-like object *fsrc* to the file-like object *fdst*. + Copy the contents of the :term:`file-like object ` *fsrc* to the file-like object *fdst*. The integer *length*, if given, is the buffer size. In particular, a negative *length* value means to copy the data without looping over the source data in chunks; by default the data is read in chunks to avoid uncontrolled memory @@ -52,7 +52,7 @@ Copy the contents (no metadata) of the file named *src* to a file named *dst* and return *dst* in the most efficient way possible. - *src* and *dst* are path-like objects or path names given as strings. + *src* and *dst* are :term:`path-like objects ` or path names given as strings. 
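As a small usage sketch of :func:`shutil.copyfile` accepting both strings and
path-like objects (the file names are hypothetical)::

   import shutil
   from pathlib import Path

   src = Path("data.csv")                          # hypothetical source file
   src.write_text("a,b\n1,2\n")
   dst = shutil.copyfile(src, "data-backup.csv")   # Path and str both work
   print(dst)                                      # -> data-backup.csv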
*dst* must be the complete target file name; look at :func:`~shutil.copy` for a copy that accepts a target directory path. If *src* and *dst* @@ -94,7 +94,7 @@ .. function:: copymode(src, dst, *, follow_symlinks=True) Copy the permission bits from *src* to *dst*. The file contents, owner, and - group are unaffected. *src* and *dst* are path-like objects or path names + group are unaffected. *src* and *dst* are :term:`path-like objects ` or path names given as strings. If *follow_symlinks* is false, and both *src* and *dst* are symbolic links, :func:`copymode` will attempt to modify the mode of *dst* itself (rather @@ -113,7 +113,7 @@ Copy the permission bits, last access time, last modification time, and flags from *src* to *dst*. On Linux, :func:`copystat` also copies the "extended attributes" where possible. The file contents, owner, and - group are unaffected. *src* and *dst* are path-like objects or path + group are unaffected. *src* and *dst* are :term:`path-like objects ` or path names given as strings. If *follow_symlinks* is false, and *src* and *dst* both @@ -274,16 +274,16 @@ .. audit-event:: shutil.copytree src,dst shutil.copytree - .. versionchanged:: 3.3 - Copy metadata when *symlinks* is false. - Now returns *dst*. - .. versionchanged:: 3.2 Added the *copy_function* argument to be able to provide a custom copy function. Added the *ignore_dangling_symlinks* argument to silence dangling symlinks errors when *symlinks* is false. + .. versionchanged:: 3.3 + Copy metadata when *symlinks* is false. + Now returns *dst*. + .. versionchanged:: 3.8 Platform-specific fast-copy syscalls may be used internally in order to copy the file more efficiently. See diff -Nru python3.11-3.11.8/Doc/library/socket.rst python3.11-3.11.9/Doc/library/socket.rst --- python3.11-3.11.8/Doc/library/socket.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/socket.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1506,8 +1506,9 @@ Receive data from the socket. The return value is a bytes object representing the data received. The maximum amount of data to be received at once is specified - by *bufsize*. See the Unix manual page :manpage:`recv(2)` for the meaning of - the optional argument *flags*; it defaults to zero. + by *bufsize*. A returned empty bytes object indicates that the client has disconnected. + See the Unix manual page :manpage:`recv(2)` for the meaning of the optional argument + *flags*; it defaults to zero. .. note:: diff -Nru python3.11-3.11.8/Doc/library/ssl.rst python3.11-3.11.9/Doc/library/ssl.rst --- python3.11-3.11.8/Doc/library/ssl.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/ssl.rst 2024-04-02 08:25:04.000000000 +0000 @@ -846,11 +846,11 @@ When Python has been compiled against an older version of OpenSSL, the flag defaults to *0*. - .. versionadded:: 3.7 + .. versionadded:: 3.6.3 .. deprecated:: 3.7 - The option is deprecated since OpenSSL 1.1.0. It was added to 2.7.15, - 3.6.3 and 3.7.0 for backwards compatibility with OpenSSL 1.0.2. + The option is deprecated since OpenSSL 1.1.0. It was added to 2.7.15 and + 3.6.3 for backwards compatibility with OpenSSL 1.0.2. .. data:: OP_NO_RENEGOTIATION @@ -1860,6 +1860,9 @@ *session*, see :attr:`~SSLSocket.session`. + To wrap an :class:`SSLSocket` in another :class:`SSLSocket`, use + :meth:`SSLContext.wrap_bio`. + .. versionchanged:: 3.5 Always allow a server_hostname to be passed, even if OpenSSL does not have SNI. @@ -2045,7 +2048,7 @@ .. 
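To illustrate the :meth:`~socket.socket.recv` clarification above (an empty
bytes object means the peer has disconnected), a minimal receive loop::

   import socket

   def recv_all(conn: socket.socket) -> bytes:
       """Read from *conn* until the peer closes its end."""
       chunks = []
       while True:
           data = conn.recv(4096)
           if not data:        # b'' -> the other side disconnected
               break
           chunks.append(data)
       return b"".join(chunks)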
versionchanged:: 3.10 - The flag had no effect with OpenSSL before version 1.1.1k. Python 3.8.9, + The flag had no effect with OpenSSL before version 1.1.1l. Python 3.8.9, 3.9.3, and 3.10 include workarounds for previous versions. .. attribute:: SSLContext.security_level diff -Nru python3.11-3.11.8/Doc/library/stdtypes.rst python3.11-3.11.9/Doc/library/stdtypes.rst --- python3.11-3.11.8/Doc/library/stdtypes.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/stdtypes.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1451,8 +1451,7 @@ sequence of values they define (instead of comparing based on object identity). -.. versionadded:: 3.3 - The :attr:`~range.start`, :attr:`~range.stop` and :attr:`~range.step` + Added the :attr:`~range.start`, :attr:`~range.stop` and :attr:`~range.step` attributes. .. seealso:: @@ -5392,10 +5391,10 @@ This object is returned from comparisons and binary operations when they are asked to operate on types they don't support. See :ref:`comparisons` for more -information. There is exactly one ``NotImplemented`` object. -``type(NotImplemented)()`` produces the singleton instance. +information. There is exactly one :data:`NotImplemented` object. +:code:`type(NotImplemented)()` produces the singleton instance. -It is written as ``NotImplemented``. +It is written as :code:`NotImplemented`. .. _bltin-boolean-values: diff -Nru python3.11-3.11.8/Doc/library/struct.rst python3.11-3.11.9/Doc/library/struct.rst --- python3.11-3.11.8/Doc/library/struct.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/struct.rst 2024-04-02 08:25:04.000000000 +0000 @@ -156,6 +156,21 @@ If the first character is not one of these, ``'@'`` is assumed. +.. note:: + + The number 1023 (``0x3ff`` in hexadecimal) has the following byte representations: + + * ``03 ff`` in big-endian (``>``) + * ``ff 03`` in little-endian (``<``) + + Python example: + + >>> import struct + >>> struct.pack('>h', 1023) + b'\x03\xff' + >>> struct.pack('`. + + (2) When used with the :func:`strptime` function, the ``%p`` directive only affects the output hour field if the ``%I`` directive is used to parse the hour. .. _leap-second: - (2) + (3) The range really is ``0`` to ``61``; value ``60`` is valid in timestamps representing `leap seconds`_ and value ``61`` is supported for historical reasons. - (3) + (4) When used with the :func:`strptime` function, ``%U`` and ``%W`` are only used in calculations when the day of the week and the year are specified. diff -Nru python3.11-3.11.8/Doc/library/types.rst python3.11-3.11.9/Doc/library/types.rst --- python3.11-3.11.8/Doc/library/types.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/types.rst 2024-04-02 08:25:04.000000000 +0000 @@ -148,7 +148,7 @@ .. index:: pair: built-in function; compile - The type for code objects such as returned by :func:`compile`. + The type of :ref:`code objects ` such as returned by :func:`compile`. .. audit-event:: code.__new__ code,filename,name,argcount,posonlyargcount,kwonlyargcount,nlocals,stacksize,flags types.CodeType @@ -156,12 +156,6 @@ required by the initializer. The audit event only occurs for direct instantiation of code objects, and is not raised for normal compilation. - .. method:: CodeType.replace(**kwargs) - - Return a copy of the code object with new values for the specified fields. - - .. versionadded:: 3.8 - .. 
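The :func:`time.strptime` footnote above (``%p`` only affects the parsed hour
when ``%I`` is used) can be checked directly; this assumes a locale whose
AM/PM designations are ``AM``/``PM``::

   import time

   print(time.strptime("08:30 PM", "%I:%M %p").tm_hour)   # -> 20
   print(time.strptime("08:30 PM", "%H:%M %p").tm_hour)   # -> 8, %p ignored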
data:: CellType The type for cell objects: such objects are used as containers for diff -Nru python3.11-3.11.8/Doc/library/typing.rst python3.11-3.11.9/Doc/library/typing.rst --- python3.11-3.11.8/Doc/library/typing.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/typing.rst 2024-04-02 08:25:04.000000000 +0000 @@ -896,7 +896,6 @@ be used for this concept instead. Type checkers should treat the two equivalently. - .. versionadded:: 3.5.4 .. versionadded:: 3.6.2 .. data:: Self @@ -2931,7 +2930,6 @@ Deprecated alias to :class:`collections.ChainMap`. - .. versionadded:: 3.5.4 .. versionadded:: 3.6.1 .. deprecated:: 3.9 @@ -2942,7 +2940,6 @@ Deprecated alias to :class:`collections.Counter`. - .. versionadded:: 3.5.4 .. versionadded:: 3.6.1 .. deprecated:: 3.9 @@ -2953,7 +2950,6 @@ Deprecated alias to :class:`collections.deque`. - .. versionadded:: 3.5.4 .. versionadded:: 3.6.1 .. deprecated:: 3.9 @@ -3032,7 +3028,7 @@ Deprecated alias to :class:`collections.abc.Collection`. - .. versionadded:: 3.6.0 + .. versionadded:: 3.6 .. deprecated:: 3.9 :class:`collections.abc.Collection` now supports subscripting (``[]``). @@ -3318,7 +3314,6 @@ Deprecated alias to :class:`contextlib.AbstractContextManager`. .. versionadded:: 3.5.4 - .. versionadded:: 3.6.0 .. deprecated:: 3.9 :class:`contextlib.AbstractContextManager` @@ -3329,7 +3324,6 @@ Deprecated alias to :class:`contextlib.AbstractAsyncContextManager`. - .. versionadded:: 3.5.4 .. versionadded:: 3.6.2 .. deprecated:: 3.9 diff -Nru python3.11-3.11.8/Doc/library/unittest.mock.rst python3.11-3.11.9/Doc/library/unittest.mock.rst --- python3.11-3.11.8/Doc/library/unittest.mock.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/unittest.mock.rst 2024-04-02 08:25:04.000000000 +0000 @@ -2092,10 +2092,10 @@ Methods and their defaults: -* ``__lt__``: ``NotImplemented`` -* ``__gt__``: ``NotImplemented`` -* ``__le__``: ``NotImplemented`` -* ``__ge__``: ``NotImplemented`` +* ``__lt__``: :data:`NotImplemented` +* ``__gt__``: :data:`!NotImplemented` +* ``__le__``: :data:`!NotImplemented` +* ``__ge__``: :data:`!NotImplemented` * ``__int__``: ``1`` * ``__contains__``: ``False`` * ``__len__``: ``0`` @@ -2375,6 +2375,14 @@ >>> m.mock_calls == [call(1), call(1, 2), ANY] True +:data:`ANY` is not limited to comparisons with call objects and so +can also be used in test assertions:: + + class TestStringMethods(unittest.TestCase): + + def test_split(self): + s = 'hello world' + self.assertEqual(s.split(), ['hello', ANY]) FILTER_DIR @@ -2772,3 +2780,123 @@ >>> mock.not_submock.attribute2 # This won't raise. .. versionadded:: 3.7 + + +Order of precedence of :attr:`side_effect`, :attr:`return_value` and *wraps* +---------------------------------------------------------------------------- + +The order of their precedence is: + +1. :attr:`~Mock.side_effect` +2. :attr:`~Mock.return_value` +3. *wraps* + +If all three are set, mock will return the value from :attr:`~Mock.side_effect`, +ignoring :attr:`~Mock.return_value` and the wrapped object altogether. If any +two are set, the one with the higher precedence will return the value. +Regardless of the order of which was set first, the order of precedence +remains unchanged. + + >>> from unittest.mock import Mock + >>> class Order: + ... @staticmethod + ... def get_value(): + ... return "third" + ... 
+ >>> order_mock = Mock(spec=Order, wraps=Order) + >>> order_mock.get_value.side_effect = ["first"] + >>> order_mock.get_value.return_value = "second" + >>> order_mock.get_value() + 'first' + +As ``None`` is the default value of :attr:`~Mock.side_effect`, if you reassign +its value back to ``None``, the order of precedence will be checked between +:attr:`~Mock.return_value` and the wrapped object, ignoring +:attr:`~Mock.side_effect`. + + >>> order_mock.get_value.side_effect = None + >>> order_mock.get_value() + 'second' + +If the value being returned by :attr:`~Mock.side_effect` is :data:`DEFAULT`, +it is ignored and the order of precedence moves to the successor to obtain the +value to return. + + >>> from unittest.mock import DEFAULT + >>> order_mock.get_value.side_effect = [DEFAULT] + >>> order_mock.get_value() + 'second' + +When :class:`Mock` wraps an object, the default value of +:attr:`~Mock.return_value` will be :data:`DEFAULT`. + + >>> order_mock = Mock(spec=Order, wraps=Order) + >>> order_mock.return_value + sentinel.DEFAULT + >>> order_mock.get_value.return_value + sentinel.DEFAULT + +The order of precedence will ignore this value and it will move to the last +successor which is the wrapped object. + +As the real call is being made to the wrapped object, creating an instance of +this mock will return the real instance of the class. The positional arguments, +if any, required by the wrapped object must be passed. + + >>> order_mock_instance = order_mock() + >>> isinstance(order_mock_instance, Order) + True + >>> order_mock_instance.get_value() + 'third' + + >>> order_mock.get_value.return_value = DEFAULT + >>> order_mock.get_value() + 'third' + + >>> order_mock.get_value.return_value = "second" + >>> order_mock.get_value() + 'second' + +But if you assign ``None`` to it, this will not be ignored as it is an +explicit assignment. So, the order of precedence will not move to the wrapped +object. + + >>> order_mock.get_value.return_value = None + >>> order_mock.get_value() is None + True + +Even if you set all three at once when initializing the mock, the order of +precedence remains the same: + + >>> order_mock = Mock(spec=Order, wraps=Order, + ... **{"get_value.side_effect": ["first"], + ... "get_value.return_value": "second"} + ... ) + ... + >>> order_mock.get_value() + 'first' + >>> order_mock.get_value.side_effect = None + >>> order_mock.get_value() + 'second' + >>> order_mock.get_value.return_value = DEFAULT + >>> order_mock.get_value() + 'third' + +If :attr:`~Mock.side_effect` is exhausted, the order of precedence will not +cause a value to be obtained from the successors. Instead, ``StopIteration`` +exception is raised. + + >>> order_mock = Mock(spec=Order, wraps=Order) + >>> order_mock.get_value.side_effect = ["first side effect value", + ... "another side effect value"] + >>> order_mock.get_value.return_value = "second" + + >>> order_mock.get_value() + 'first side effect value' + >>> order_mock.get_value() + 'another side effect value' + + >>> order_mock.get_value() + Traceback (most recent call last): + ... + StopIteration diff -Nru python3.11-3.11.8/Doc/library/urllib.request.rst python3.11-3.11.9/Doc/library/urllib.request.rst --- python3.11-3.11.8/Doc/library/urllib.request.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/urllib.request.rst 2024-04-02 08:25:04.000000000 +0000 @@ -105,11 +105,9 @@ .. versionchanged:: 3.2 *cafile* and *capath* were added. - .. 
versionchanged:: 3.2 HTTPS virtual hosts are now supported if possible (that is, if :const:`ssl.HAS_SNI` is true). - .. versionadded:: 3.2 *data* can be an iterable object. .. versionchanged:: 3.3 diff -Nru python3.11-3.11.8/Doc/library/venv.rst python3.11-3.11.9/Doc/library/venv.rst --- python3.11-3.11.8/Doc/library/venv.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/venv.rst 2024-04-02 08:25:04.000000000 +0000 @@ -54,7 +54,7 @@ .. seealso:: `Python Packaging User Guide: Creating and using virtual environments - `__ + `__ .. include:: ../includes/wasm-notavail.rst @@ -276,10 +276,6 @@ the virtual environment. - .. versionchanged:: 3.12 - The attribute ``lib_path`` was added to the context, and the context - object was documented. - .. versionchanged:: 3.11 The *venv* :ref:`sysconfig installation scheme ` diff -Nru python3.11-3.11.8/Doc/library/xml.etree.elementtree.rst python3.11-3.11.9/Doc/library/xml.etree.elementtree.rst --- python3.11-3.11.8/Doc/library/xml.etree.elementtree.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/xml.etree.elementtree.rst 2024-04-02 08:25:04.000000000 +0000 @@ -49,7 +49,7 @@ Parsing XML ^^^^^^^^^^^ -We'll be using the following XML document as the sample data for this section: +We'll be using the fictive :file:`country_data.xml` XML document as the sample data for this section: .. code-block:: xml @@ -166,6 +166,11 @@ at :func:`iterparse`. It can be useful when you're reading a large XML document and don't want to hold it wholly in memory. +Where *immediate* feedback through events is wanted, calling method +:meth:`XMLPullParser.flush` can help reduce delay; +please make sure to study the related security notes. + + Finding interesting elements ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1378,6 +1383,24 @@ Feeds data to the parser. *data* is encoded data. + + .. method:: flush() + + Triggers parsing of any previously fed unparsed data, which can be + used to ensure more immediate feedback, in particular with Expat >=2.6.0. + The implementation of :meth:`flush` temporarily disables reparse deferral + with Expat (if currently enabled) and triggers a reparse. + Disabling reparse deferral has security consequences; please see + :meth:`xml.parsers.expat.xmlparser.SetReparseDeferralEnabled` for details. + + Note that :meth:`flush` has been backported to some prior releases of + CPython as a security fix. Check for availability of :meth:`flush` + using :func:`hasattr` if used in code running across a variety of Python + versions. + + .. versionadded:: 3.11.9 + + :meth:`XMLParser.feed` calls *target*\'s ``start(tag, attrs_dict)`` method for each opening tag, its ``end(tag)`` method for each closing tag, and data is processed by method ``data(data)``. For further supported callback @@ -1439,6 +1462,22 @@ Feed the given bytes data to the parser. + .. method:: flush() + + Triggers parsing of any previously fed unparsed data, which can be + used to ensure more immediate feedback, in particular with Expat >=2.6.0. + The implementation of :meth:`flush` temporarily disables reparse deferral + with Expat (if currently enabled) and triggers a reparse. + Disabling reparse deferral has security consequences; please see + :meth:`xml.parsers.expat.xmlparser.SetReparseDeferralEnabled` for details. + + Note that :meth:`flush` has been backported to some prior releases of + CPython as a security fix. Check for availability of :meth:`flush` + using :func:`hasattr` if used in code running across a variety of Python + versions. + + .. 
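A sketch of the defensive pattern suggested above for :meth:`!flush`, using
:class:`~xml.etree.ElementTree.XMLPullParser` and guarding the call with
:func:`hasattr`::

   from xml.etree.ElementTree import XMLPullParser

   parser = XMLPullParser(events=("start",))
   parser.feed("<root><item>spam")      # document is still incomplete

   # flush() only exists on releases that received the backport.
   if hasattr(parser, "flush"):
       parser.flush()                   # parse the buffered data now

   for event, elem in parser.read_events():
       print(event, elem.tag)           # -> start root / start item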
versionadded:: 3.11.9 + .. method:: close() Signal the parser that the data stream is terminated. Unlike diff -Nru python3.11-3.11.8/Doc/library/xml.rst python3.11-3.11.9/Doc/library/xml.rst --- python3.11-3.11.8/Doc/library/xml.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/library/xml.rst 2024-04-02 08:25:04.000000000 +0000 @@ -68,6 +68,7 @@ external entity expansion Safe (5) Safe (2) Safe (3) Safe (5) Safe (4) `DTD`_ retrieval Safe (5) Safe Safe Safe (5) Safe decompression bomb Safe Safe Safe Safe **Vulnerable** +large tokens **Vulnerable** (6) **Vulnerable** (6) **Vulnerable** (6) **Vulnerable** (6) **Vulnerable** (6) ========================= ================== ================== ================== ================== ================== 1. Expat 2.4.1 and newer is not vulnerable to the "billion laughs" and @@ -81,6 +82,11 @@ 4. :mod:`xmlrpc.client` doesn't expand external entities and omits them. 5. Since Python 3.7.1, external general entities are no longer processed by default. +6. Expat 2.6.0 and newer is not vulnerable to denial of service + through quadratic runtime caused by parsing large tokens. + Items still listed as vulnerable due to + potential reliance on system-provided libraries. Check + :const:`!pyexpat.EXPAT_VERSION`. billion laughs / exponential entity expansion @@ -114,6 +120,13 @@ files. For an attacker it can reduce the amount of transmitted data by three magnitudes or more. +large tokens + Expat needs to re-parse unfinished tokens; without the protection + introduced in Expat 2.6.0, this can lead to quadratic runtime that can + be used to cause denial of service in the application parsing XML. + The issue is known as + `CVE-2023-52425 `_. + The documentation for `defusedxml`_ on PyPI has further information about all known attack vectors with examples and references. diff -Nru python3.11-3.11.8/Doc/reference/datamodel.rst python3.11-3.11.9/Doc/reference/datamodel.rst --- python3.11-3.11.8/Doc/reference/datamodel.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/reference/datamodel.rst 2024-04-02 08:25:04.000000000 +0000 @@ -34,7 +34,7 @@ Every object has an identity, a type and a value. An object's *identity* never changes once it has been created; you may think of it as the object's address in -memory. The ':keyword:`is`' operator compares the identity of two objects; the +memory. The :keyword:`is` operator compares the identity of two objects; the :func:`id` function returns an integer representing its identity. .. impl-detail:: @@ -81,7 +81,7 @@ Note that the use of the implementation's tracing or debugging facilities may keep objects alive that would normally be collectable. Also note that catching -an exception with a ':keyword:`try`...\ :keyword:`except`' statement may keep +an exception with a :keyword:`try`...\ :keyword:`except` statement may keep objects alive. Some objects contain references to "external" resources such as open files or @@ -89,8 +89,8 @@ garbage-collected, but since garbage collection is not guaranteed to happen, such objects also provide an explicit way to release the external resource, usually a :meth:`!close` method. Programs are strongly recommended to explicitly -close such objects. The ':keyword:`try`...\ :keyword:`finally`' statement -and the ':keyword:`with`' statement provide convenient ways to do this. +close such objects. The :keyword:`try`...\ :keyword:`finally` statement +and the :keyword:`with` statement provide convenient ways to do this. .. index:: single: container @@ -159,7 +159,7 @@ .. 
index:: pair: object; NotImplemented This type has a single value. There is a single object with this value. This -object is accessed through the built-in name ``NotImplemented``. Numeric methods +object is accessed through the built-in name :data:`NotImplemented`. Numeric methods and rich comparison methods should return this value if they do not implement the operation for the operands provided. (The interpreter will then try the reflected operation, or some other fallback, depending on the operator.) It @@ -170,7 +170,7 @@ for more details. .. versionchanged:: 3.9 - Evaluating ``NotImplemented`` in a boolean context is deprecated. While + Evaluating :data:`NotImplemented` in a boolean context is deprecated. While it currently evaluates as true, it will emit a :exc:`DeprecationWarning`. It will raise a :exc:`TypeError` in a future version of Python. @@ -299,14 +299,17 @@ These represent finite ordered sets indexed by non-negative numbers. The built-in function :func:`len` returns the number of items of a sequence. When the length of a sequence is *n*, the index set contains the numbers 0, 1, -..., *n*-1. Item *i* of sequence *a* is selected by ``a[i]``. +..., *n*-1. Item *i* of sequence *a* is selected by ``a[i]``. Some sequences, +including built-in sequences, interpret negative subscripts by adding the +sequence length. For example, ``a[-2]`` equals ``a[n-2]``, the second to last +item of sequence a with length ``n``. .. index:: single: slicing Sequences also support slicing: ``a[i:j]`` selects all items with index *k* such that *i* ``<=`` *k* ``<`` *j*. When used as an expression, a slice is a -sequence of the same type. This implies that the index set is renumbered so -that it starts at 0. +sequence of the same type. The comment above about negative indexes also applies +to negative slice positions. Some sequences also support "extended slicing" with a third "step" parameter: ``a[i:j:k]`` selects all items of *a* with index *x* where ``x = i + n*k``, *n* @@ -1121,6 +1124,8 @@ * - .. attribute:: codeobject.co_qualname - The fully qualified function name + .. versionadded:: 3.11 + * - .. attribute:: codeobject.co_argcount - The total number of positional :term:`parameters ` (including positional-only parameters and parameters with default values) @@ -1273,6 +1278,12 @@ :pep:`626` - Precise line numbers for debugging and other tools. The PEP that introduced the :meth:`!co_lines` method. +.. method:: codeobject.replace(**kwargs) + + Return a copy of the code object with new values for the specified fields. + + .. versionadded:: 3.8 + .. _frame-objects: @@ -1755,7 +1766,7 @@ ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and ``x>=y`` calls ``x.__ge__(y)``. - A rich comparison method may return the singleton ``NotImplemented`` if it does + A rich comparison method may return the singleton :data:`NotImplemented` if it does not implement the operation for a given pair of arguments. By convention, ``False`` and ``True`` are returned for a successful comparison. However, these methods can return any value, so if the comparison operator is used in a Boolean @@ -1763,10 +1774,10 @@ :func:`bool` on the value to determine if the result is true or false. By default, ``object`` implements :meth:`__eq__` by using ``is``, returning - ``NotImplemented`` in the case of a false comparison: + :data:`NotImplemented` in the case of a false comparison: ``True if x is y else NotImplemented``. 
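A small sketch of a rich comparison method returning :data:`NotImplemented`
for operands it does not understand; the ``Celsius`` class is hypothetical::

   class Celsius:
       def __init__(self, degrees):
           self.degrees = degrees

       def __eq__(self, other):
           if isinstance(other, Celsius):
               return self.degrees == other.degrees
           # Unknown operand type: let the other operand (or the default
           # identity-based fallback) decide instead of guessing.
           return NotImplemented

   print(Celsius(20) == Celsius(20))   # -> True
   print(Celsius(20) == "20")          # -> False (identity fallback)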
For :meth:`__ne__`, by default it delegates to :meth:`__eq__` and inverts the result unless it is - ``NotImplemented``. There are no other implied relationships among the + :data:`!NotImplemented`. There are no other implied relationships among the comparison operators or default implementations; for example, the truth of ``(x=`` 0. The return value may also be - :const:`NotImplemented`, which is treated the same as if the + :data:`NotImplemented`, which is treated the same as if the ``__length_hint__`` method didn't exist at all. This method is purely an optimization and is never required for correctness. @@ -2944,7 +2958,7 @@ function is to be supported. If one of those methods does not support the operation with the supplied - arguments, it should return ``NotImplemented``. + arguments, it should return :data:`NotImplemented`. .. method:: object.__radd__(self, other) @@ -2974,7 +2988,7 @@ types. [#]_ For instance, to evaluate the expression ``x - y``, where *y* is an instance of a class that has an :meth:`__rsub__` method, ``type(y).__rsub__(y, x)`` is called if ``type(x).__sub__(x, y)`` returns - *NotImplemented*. + :data:`NotImplemented`. .. index:: pair: built-in function; pow @@ -3008,10 +3022,12 @@ (``+=``, ``-=``, ``*=``, ``@=``, ``/=``, ``//=``, ``%=``, ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods should attempt to do the operation in-place (modifying *self*) and return the result (which could be, - but does not have to be, *self*). If a specific method is not defined, the + but does not have to be, *self*). If a specific method is not defined, or if + that method returns :data:`NotImplemented`, the augmented assignment falls back to the normal methods. For instance, if *x* is an instance of a class with an :meth:`__iadd__` method, ``x += y`` is - equivalent to ``x = x.__iadd__(y)`` . Otherwise, ``x.__add__(y)`` and + equivalent to ``x = x.__iadd__(y)`` . If :meth:`__iadd__` does not exist, or if ``x.__iadd__(y)`` + returns :data:`!NotImplemented`, ``x.__add__(y)`` and ``y.__radd__(x)`` are considered, as with the evaluation of ``x + y``. In certain situations, augmented assignment can result in unexpected errors (see :ref:`faq-augmented-assignment-tuple-error`), but this behavior is in fact @@ -3420,7 +3436,7 @@ the behavior that ``None`` is not callable. .. [#] "Does not support" here means that the class has no such method, or - the method returns ``NotImplemented``. Do not set the method to + the method returns :data:`NotImplemented`. Do not set the method to ``None`` if you want to force fallback to the right operand's reflected method—that will instead have the opposite effect of explicitly *blocking* such fallback. diff -Nru python3.11-3.11.8/Doc/reference/expressions.rst python3.11-3.11.9/Doc/reference/expressions.rst --- python3.11-3.11.8/Doc/reference/expressions.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/reference/expressions.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1522,7 +1522,7 @@ ``x == x`` are all false, while ``x != x`` is true. This behavior is compliant with IEEE 754. -* ``None`` and ``NotImplemented`` are singletons. :PEP:`8` advises that +* ``None`` and :data:`NotImplemented` are singletons. :PEP:`8` advises that comparisons for singletons should always be done with ``is`` or ``is not``, never the equality operators. 
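The augmented-assignment fallback described above can be sketched with a
hypothetical ``Tally`` class whose :meth:`!__iadd__` declines non-integer
operands::

   class Tally:
       def __init__(self, total=0):
           self.total = total

       def __iadd__(self, other):
           if not isinstance(other, int):
               # Declining here makes Python try __add__/__radd__ next.
               return NotImplemented
           self.total += other
           return self

       def __add__(self, other):
           return Tally(self.total + int(other))

   t = Tally()
   t += 2          # handled in place by __iadd__
   t += 3.0        # __iadd__ declines, falls back to __add__
   print(t.total)  # -> 5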
diff -Nru python3.11-3.11.8/Doc/reference/lexical_analysis.rst python3.11-3.11.9/Doc/reference/lexical_analysis.rst --- python3.11-3.11.8/Doc/reference/lexical_analysis.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/reference/lexical_analysis.rst 2024-04-02 08:25:04.000000000 +0000 @@ -96,10 +96,9 @@ which is recognized by Bram Moolenaar's VIM. -If no encoding declaration is found, the default encoding is UTF-8. In -addition, if the first bytes of the file are the UTF-8 byte-order mark -(``b'\xef\xbb\xbf'``), the declared file encoding is UTF-8 (this is supported, -among others, by Microsoft's :program:`notepad`). +If no encoding declaration is found, the default encoding is UTF-8. If the +implicit or explicit encoding of a file is UTF-8, an initial UTF-8 byte-order +mark (b'\xef\xbb\xbf') is ignored rather than being a syntax error. If an encoding is declared, the encoding name must be recognized by Python (see :ref:`standard-encodings`). The @@ -508,7 +507,6 @@ The ``'rb'`` prefix of raw bytes literals has been added as a synonym of ``'br'``. -.. versionadded:: 3.3 Support for the unicode legacy literal (``u'value'``) was reintroduced to simplify the maintenance of dual Python 2.x and 3.x codebases. See :pep:`414` for more information. @@ -721,7 +719,7 @@ : ("," `conditional_expression` | "," "*" `or_expr`)* [","] : | `yield_expression` conversion: "s" | "r" | "a" - format_spec: (`literal_char` | NULL | `replacement_field`)* + format_spec: (`literal_char` | `replacement_field`)* literal_char: The parts of the string outside curly braces are treated literally, diff -Nru python3.11-3.11.8/Doc/reference/simple_stmts.rst python3.11-3.11.9/Doc/reference/simple_stmts.rst --- python3.11-3.11.8/Doc/reference/simple_stmts.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/reference/simple_stmts.rst 2024-04-02 08:25:04.000000000 +0000 @@ -663,8 +663,7 @@ .. versionchanged:: 3.3 :const:`None` is now permitted as ``Y`` in ``raise X from Y``. -.. versionadded:: 3.3 - The :attr:`~BaseException.__suppress_context__` attribute to suppress + Added the :attr:`~BaseException.__suppress_context__` attribute to suppress automatic display of the exception context. .. versionchanged:: 3.11 diff -Nru python3.11-3.11.8/Doc/tools/.nitignore python3.11-3.11.9/Doc/tools/.nitignore --- python3.11-3.11.8/Doc/tools/.nitignore 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/tools/.nitignore 2024-04-02 08:25:04.000000000 +0000 @@ -3,16 +3,13 @@ # Keep lines sorted lexicographically to help avoid merge conflicts. 
Doc/c-api/descriptor.rst -Doc/c-api/exceptions.rst Doc/c-api/float.rst -Doc/c-api/gcsupport.rst Doc/c-api/init.rst Doc/c-api/init_config.rst Doc/c-api/intro.rst Doc/c-api/module.rst Doc/c-api/stable.rst Doc/c-api/structures.rst -Doc/c-api/sys.rst Doc/c-api/type.rst Doc/c-api/typeobj.rst Doc/extending/extending.rst @@ -23,7 +20,6 @@ Doc/library/asyncio-extending.rst Doc/library/asyncio-policy.rst Doc/library/asyncio-subprocess.rst -Doc/library/bdb.rst Doc/library/collections.rst Doc/library/copy.rst Doc/library/ctypes.rst @@ -35,10 +31,8 @@ Doc/library/email.errors.rst Doc/library/email.parser.rst Doc/library/email.policy.rst -Doc/library/enum.rst Doc/library/exceptions.rst Doc/library/faulthandler.rst -Doc/library/fcntl.rst Doc/library/functools.rst Doc/library/http.cookiejar.rst Doc/library/http.server.rst @@ -54,7 +48,6 @@ Doc/library/platform.rst Doc/library/plistlib.rst Doc/library/profile.rst -Doc/library/pydoc.rst Doc/library/pyexpat.rst Doc/library/readline.rst Doc/library/resource.rst @@ -89,15 +82,11 @@ Doc/reference/datamodel.rst Doc/tutorial/datastructures.rst Doc/using/windows.rst -Doc/whatsnew/2.0.rst -Doc/whatsnew/2.1.rst Doc/whatsnew/2.4.rst Doc/whatsnew/2.5.rst Doc/whatsnew/2.6.rst Doc/whatsnew/2.7.rst Doc/whatsnew/3.0.rst -Doc/whatsnew/3.1.rst -Doc/whatsnew/3.2.rst Doc/whatsnew/3.3.rst Doc/whatsnew/3.4.rst Doc/whatsnew/3.5.rst diff -Nru python3.11-3.11.8/Doc/tools/static/changelog_search.js python3.11-3.11.9/Doc/tools/static/changelog_search.js --- python3.11-3.11.8/Doc/tools/static/changelog_search.js 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/tools/static/changelog_search.js 2024-04-02 08:25:04.000000000 +0000 @@ -1,53 +1,59 @@ -$(document).ready(function() { - // add the search form and bind the events - $('h1').after([ - '

Filter entries by content:', - '', - '

' - ].join('\n')); +document.addEventListener("DOMContentLoaded", function () { + // add the search form and bind the events + document + .querySelector("h1") + .insertAdjacentHTML( + "afterend", + [ + "

Filter entries by content:", + '', + '

', + ].join("\n"), + ); - function dofilter() { - try { - var query = new RegExp($('#searchbox').val(), 'i'); + function doFilter() { + let query; + try { + query = new RegExp(document.querySelector("#searchbox").value, "i"); + } catch (e) { + return; // not a valid regex (yet) + } + // find headers for the versions (What's new in Python X.Y.Z?) + const h2s = document.querySelectorAll("#changelog h2"); + for (const h2 of h2s) { + let sections_found = 0; + // find headers for the sections (Core, Library, etc.) + const h3s = h2.parentNode.querySelectorAll("h3"); + for (const h3 of h3s) { + let entries_found = 0; + // find all the entries + const lis = h3.parentNode.querySelectorAll("li"); + for (let li of lis) { + // check if the query matches the entry + if (query.test(li.textContent)) { + li.style.display = "block"; + entries_found++; + } else { + li.style.display = "none"; + } } - catch (e) { - return; // not a valid regex (yet) + // if there are entries, show the section, otherwise hide it + if (entries_found > 0) { + h3.parentNode.style.display = "block"; + sections_found++; + } else { + h3.parentNode.style.display = "none"; } - // find headers for the versions (What's new in Python X.Y.Z?) - $('#changelog h2').each(function(index1, h2) { - var h2_parent = $(h2).parent(); - var sections_found = 0; - // find headers for the sections (Core, Library, etc.) - h2_parent.find('h3').each(function(index2, h3) { - var h3_parent = $(h3).parent(); - var entries_found = 0; - // find all the entries - h3_parent.find('li').each(function(index3, li) { - var li = $(li); - // check if the query matches the entry - if (query.test(li.text())) { - li.show(); - entries_found++; - } - else { - li.hide(); - } - }); - // if there are entries, show the section, otherwise hide it - if (entries_found > 0) { - h3_parent.show(); - sections_found++; - } - else { - h3_parent.hide(); - } - }); - if (sections_found > 0) - h2_parent.show(); - else - h2_parent.hide(); - }); + } + if (sections_found > 0) { + h2.parentNode.style.display = "block"; + } else { + h2.parentNode.style.display = "none"; + } } - $('#searchbox').keyup(dofilter); - $('#searchbox-submit').click(dofilter); + } + document.querySelector("#searchbox").addEventListener("keyup", doFilter); + document + .querySelector("#searchbox-submit") + .addEventListener("click", doFilter); }); diff -Nru python3.11-3.11.8/Doc/tools/templates/indexcontent.html python3.11-3.11.9/Doc/tools/templates/indexcontent.html --- python3.11-3.11.8/Doc/tools/templates/indexcontent.html 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/tools/templates/indexcontent.html 2024-04-02 08:25:04.000000000 +0000 @@ -7,62 +7,62 @@

[The HTML markup of this ``indexcontent.html`` hunk did not survive extraction. The recoverable text shows the documentation landing page being reworked: the heading "Parts of the documentation:" becomes "Documentation sections:", "Indices and tables:" becomes "Indices, glossary, and search:", and "Meta information:" becomes "Project information:", while short blurbs such as "Start here: a tour of Python's syntax and features", "In-depth topic manuals", "Frequently asked questions (with answers!)", "Terms explained" and "Search this documentation" are added next to the section links.]
{% endblock %} diff -Nru python3.11-3.11.8/Doc/tutorial/errors.rst python3.11-3.11.9/Doc/tutorial/errors.rst --- python3.11-3.11.8/Doc/tutorial/errors.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/tutorial/errors.rst 2024-04-02 08:25:04.000000000 +0000 @@ -20,12 +20,12 @@ >>> while True print('Hello world') File "", line 1 while True print('Hello world') - ^ + ^^^^^ SyntaxError: invalid syntax -The parser repeats the offending line and displays a little 'arrow' pointing at -the earliest point in the line where the error was detected. The error is -caused by (or at least detected at) the token *preceding* the arrow: in the +The parser repeats the offending line and displays little 'arrow's pointing +at the token in the line where the error was detected. The error may be +caused by the absence of a token *before* the indicated token. In the example, the error is detected at the function :func:`print`, since a colon (``':'``) is missing before it. File name and line number are printed so you know where to look in case the input came from a script. diff -Nru python3.11-3.11.8/Doc/tutorial/introduction.rst python3.11-3.11.9/Doc/tutorial/introduction.rst --- python3.11-3.11.8/Doc/tutorial/introduction.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/tutorial/introduction.rst 2024-04-02 08:25:04.000000000 +0000 @@ -405,13 +405,6 @@ >>> squares[-3:] # slicing returns a new list [9, 16, 25] -All slice operations return a new list containing the requested elements. This -means that the following slice returns a -:ref:`shallow copy ` of the list:: - - >>> squares[:] - [1, 4, 9, 16, 25] - Lists also support operations like concatenation:: >>> squares + [36, 49, 64, 81, 100] @@ -435,6 +428,30 @@ >>> cubes [1, 8, 27, 64, 125, 216, 343] +Simple assignment in Python never copies data. When you assign a list +to a variable, the variable refers to the *existing list*. +Any changes you make to the list through one variable will be seen +through all other variables that refer to it.:: + + >>> rgb = ["Red", "Green", "Blue"] + >>> rgba = rgb + >>> id(rgb) == id(rgba) # they reference the same object + True + >>> rgba.append("Alph") + >>> rgb + ["Red", "Green", "Blue", "Alph"] + +All slice operations return a new list containing the requested elements. This +means that the following slice returns a +:ref:`shallow copy ` of the list:: + + >>> correct_rgba = rgba[:] + >>> correct_rgba[-1] = "Alpha" + >>> correct_rgba + ["Red", "Green", "Blue", "Alpha"] + >>> rgba + ["Red", "Green", "Blue", "Alph"] + Assignment to slices is also possible, and this can even change the size of the list or clear it entirely:: diff -Nru python3.11-3.11.8/Doc/using/cmdline.rst python3.11-3.11.9/Doc/using/cmdline.rst --- python3.11-3.11.8/Doc/using/cmdline.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/using/cmdline.rst 2024-04-02 08:25:04.000000000 +0000 @@ -242,12 +242,13 @@ .. option:: -b - Issue a warning when comparing :class:`bytes` or :class:`bytearray` with - :class:`str` or :class:`bytes` with :class:`int`. Issue an error when the - option is given twice (:option:`!-bb`). + Issue a warning when converting :class:`bytes` or :class:`bytearray` to + :class:`str` without specifying encoding or comparing :class:`!bytes` or + :class:`!bytearray` with :class:`!str` or :class:`!bytes` with :class:`int`. + Issue an error when the option is given twice (:option:`!-bb`). .. versionchanged:: 3.5 - Affects comparisons of :class:`bytes` with :class:`int`. 
+ Affects also comparisons of :class:`bytes` with :class:`int`. .. option:: -B @@ -372,17 +373,19 @@ :envvar:`PYTHONHASHSEED` allows you to set a fixed value for the hash seed secret. + .. versionadded:: 3.2.3 + .. versionchanged:: 3.7 The option is no longer ignored. - .. versionadded:: 3.2.3 - .. option:: -s Don't add the :data:`user site-packages directory ` to :data:`sys.path`. + See also :envvar:`PYTHONNOUSERSITE`. + .. seealso:: :pep:`370` -- Per user site-packages directory @@ -514,7 +517,7 @@ asyncio'``. See also :envvar:`PYTHONPROFILEIMPORTTIME`. * ``-X dev``: enable :ref:`Python Development Mode `, introducing additional runtime checks that are too expensive to be enabled by - default. + default. See also :envvar:`PYTHONDEVMODE`. * ``-X utf8`` enables the :ref:`Python UTF-8 Mode `. ``-X utf8=0`` explicitly disables :ref:`Python UTF-8 Mode ` (even when it would otherwise activate automatically). @@ -542,23 +545,22 @@ It also allows passing arbitrary values and retrieving them through the :data:`sys._xoptions` dictionary. - .. versionchanged:: 3.2 - The :option:`-X` option was added. + .. versionadded:: 3.2 - .. versionadded:: 3.3 - The ``-X faulthandler`` option. + .. versionchanged:: 3.3 + Added the ``-X faulthandler`` option. - .. versionadded:: 3.4 - The ``-X showrefcount`` and ``-X tracemalloc`` options. + .. versionchanged:: 3.4 + Added the ``-X showrefcount`` and ``-X tracemalloc`` options. - .. versionadded:: 3.6 - The ``-X showalloccount`` option. + .. versionchanged:: 3.6 + Added the ``-X showalloccount`` option. - .. versionadded:: 3.7 - The ``-X importtime``, ``-X dev`` and ``-X utf8`` options. + .. versionchanged:: 3.7 + Added the ``-X importtime``, ``-X dev`` and ``-X utf8`` options. - .. versionadded:: 3.8 - The ``-X pycache_prefix`` option. The ``-X dev`` option now logs + .. versionchanged:: 3.8 + Added the ``-X pycache_prefix`` option. The ``-X dev`` option now logs ``close()`` exceptions in :class:`io.IOBase` destructor. .. versionchanged:: 3.9 @@ -567,18 +569,13 @@ The ``-X showalloccount`` option has been removed. - .. versionadded:: 3.10 - The ``-X warn_default_encoding`` option. + .. versionchanged:: 3.10 + Added the ``-X warn_default_encoding`` option. Removed the ``-X oldparser`` option. - .. versionadded:: 3.11 - The ``-X no_debug_ranges`` option. - - .. versionadded:: 3.11 - The ``-X frozen_modules`` option. - - .. versionadded:: 3.11 - The ``-X int_max_str_digits`` option. + .. versionchanged:: 3.11 + Added the ``-X no_debug_ranges``, ``-X frozen_modules`` and + ``-X int_max_str_digits`` options. Options you shouldn't use @@ -895,11 +892,11 @@ * ``malloc_debug``: same as ``malloc`` but also install debug hooks. * ``pymalloc_debug``: same as ``pymalloc`` but also install debug hooks. + .. versionadded:: 3.6 + .. versionchanged:: 3.7 Added the ``"default"`` allocator. - .. versionadded:: 3.6 - .. envvar:: PYTHONMALLOCSTATS diff -Nru python3.11-3.11.8/Doc/using/configure.rst python3.11-3.11.9/Doc/using/configure.rst --- python3.11-3.11.8/Doc/using/configure.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/using/configure.rst 2024-04-02 08:25:04.000000000 +0000 @@ -2,6 +2,8 @@ Configure Python **************** +.. highlight:: sh + .. _configure-options: Configure Options @@ -587,7 +589,9 @@ An environment variable that points to a file with configure overrides. - Example *config.site* file:: + Example *config.site* file: + + .. 
code-block:: ini # config.site-aarch64 ac_cv_buggy_getaddrinfo=no @@ -652,7 +656,9 @@ Some C extensions are built as built-in modules, like the ``sys`` module. They are built with the ``Py_BUILD_CORE_BUILTIN`` macro defined. -Built-in modules have no ``__file__`` attribute:: +Built-in modules have no ``__file__`` attribute: + +.. code-block:: pycon >>> import sys >>> sys @@ -664,7 +670,9 @@ Other C extensions are built as dynamic libraries, like the ``_asyncio`` module. They are built with the ``Py_BUILD_CORE_MODULE`` macro defined. -Example on Linux x86-64:: +Example on Linux x86-64: + +.. code-block:: pycon >>> import _asyncio >>> _asyncio diff -Nru python3.11-3.11.8/Doc/using/mac.rst python3.11-3.11.9/Doc/using/mac.rst --- python3.11-3.11.8/Doc/using/mac.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/using/mac.rst 2024-04-02 08:25:04.000000000 +0000 @@ -10,41 +10,46 @@ Python on a Mac running macOS is in principle very similar to Python on any other Unix platform, but there are a number of additional features such as -the IDE and the Package Manager that are worth pointing out. +the integrated development environment (IDE) and the Package Manager that are +worth pointing out. + .. _getting-osx: +.. _getting-and-installing-macpython: -Getting and Installing MacPython -================================ +Getting and Installing Python +============================= macOS used to come with Python 2.7 pre-installed between versions 10.8 and `12.3 `_. -You are invited to install the most recent version of Python 3 from the Python -website (https://www.python.org). A current "universal binary" build of Python, -which runs natively on the Mac's new Intel and legacy PPC CPU's, is available -there. +You are invited to install the most recent version of Python 3 from the `Python +website `__. +A current "universal2 binary" build of Python, which runs natively on the Mac's +new Apple Silicon and legacy Intel processors, is available there. What you get after installing is a number of things: -* A :file:`Python 3.12` folder in your :file:`Applications` folder. In here +* A |python_version_literal| folder in your :file:`Applications` folder. In here you find IDLE, the development environment that is a standard part of official - Python distributions; and PythonLauncher, which handles double-clicking Python + Python distributions; and :program:`Python Launcher`, which handles double-clicking Python scripts from the Finder. * A framework :file:`/Library/Frameworks/Python.framework`, which includes the Python executable and libraries. The installer adds this location to your shell - path. To uninstall MacPython, you can simply remove these three things. A - symlink to the Python executable is placed in /usr/local/bin/. + path. To uninstall Python, you can remove these three things. A + symlink to the Python executable is placed in :file:`/usr/local/bin/`. -The Apple-provided build of Python is installed in -:file:`/System/Library/Frameworks/Python.framework` and :file:`/usr/bin/python`, -respectively. You should never modify or delete these, as they are -Apple-controlled and are used by Apple- or third-party software. Remember that -if you choose to install a newer Python version from python.org, you will have -two different but functional Python installations on your computer, so it will -be important that your paths and usages are consistent with what you want to do. +.. note:: -IDLE includes a help menu that allows you to access Python documentation. 
If you + On macOS 10.8-12.3, the Apple-provided build of Python is installed in + :file:`/System/Library/Frameworks/Python.framework` and :file:`/usr/bin/python`, + respectively. You should never modify or delete these, as they are + Apple-controlled and are used by Apple- or third-party software. Remember that + if you choose to install a newer Python version from python.org, you will have + two different but functional Python installations on your computer, so it will + be important that your paths and usages are consistent with what you want to do. + +IDLE includes a Help menu that allows you to access Python documentation. If you are completely new to Python you should start reading the tutorial introduction in that document. @@ -56,29 +61,29 @@ -------------------------- Your best way to get started with Python on macOS is through the IDLE -integrated development environment, see section :ref:`ide` and use the Help menu +integrated development environment; see section :ref:`ide` and use the Help menu when the IDE is running. If you want to run Python scripts from the Terminal window command line or from the Finder you first need an editor to create your script. macOS comes with a -number of standard Unix command line editors, :program:`vim` and -:program:`emacs` among them. If you want a more Mac-like editor, -:program:`BBEdit` or :program:`TextWrangler` from Bare Bones Software (see -http://www.barebones.com/products/bbedit/index.html) are good choices, as is -:program:`TextMate` (see https://macromates.com/). Other editors include -:program:`Gvim` (https://macvim.org/macvim/) and :program:`Aquamacs` -(http://aquamacs.org/). +number of standard Unix command line editors, :program:`vim` and +:program:`nano` among them. If you want a more Mac-like editor, +:program:`BBEdit` from Bare Bones Software (see +https://www.barebones.com/products/bbedit/index.html) is a good choice, as is +:program:`TextMate` (see https://macromates.com). Other editors include +:program:`MacVim` (https://macvim.org) and :program:`Aquamacs` +(https://aquamacs.org). To run your script from the Terminal window you must make sure that :file:`/usr/local/bin` is in your shell search path. To run your script from the Finder you have two options: -* Drag it to :program:`PythonLauncher` +* Drag it to :program:`Python Launcher`. -* Select :program:`PythonLauncher` as the default application to open your - script (or any .py script) through the finder Info window and double-click it. - :program:`PythonLauncher` has various preferences to control how your script is +* Select :program:`Python Launcher` as the default application to open your + script (or any ``.py`` script) through the finder Info window and double-click it. + :program:`Python Launcher` has various preferences to control how your script is launched. Option-dragging allows you to change these for one invocation, or use its Preferences menu to change things globally. @@ -103,10 +108,11 @@ :envvar:`PYTHONPATH`, but setting these variables for programs started from the Finder is non-standard as the Finder does not read your :file:`.profile` or :file:`.cshrc` at startup. You need to create a file -:file:`~/.MacOSX/environment.plist`. See Apple's Technical Document QA1067 for -details. +:file:`~/.MacOSX/environment.plist`. See Apple's +`Technical Q&A QA1067 `__ +for details. -For more information on installation Python packages in MacPython, see section +For more information on installing Python packages, see section :ref:`mac-package-manager`.
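The note above means a machine can end up with several working interpreters side by side. A minimal, illustrative way to confirm which interpreter a shell command actually resolves to, and where its packages land (this check is not part of the documentation change itself), is::

    import site
    import sys

    # Path of the interpreter binary that is actually running
    # (for example the python.org framework build rather than an Apple one).
    print(sys.executable)
    print(sys.version)

    # Directories where packages installed for this interpreter end up.
    print(site.getsitepackages())
    print(site.getusersitepackages())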
@@ -115,9 +121,9 @@ The IDE ======= -MacPython ships with the standard IDLE development environment. A good +Python ships with the standard IDLE development environment. A good introduction to using IDLE can be found at -http://www.hashcollision.org/hkn/python/idle_intro/index.html. +https://www.hashcollision.org/hkn/python/idle_intro/index.html. .. _mac-package-manager: @@ -130,8 +136,10 @@ .. _Python Packaging User Guide: https://packaging.python.org/en/latest/tutorials/installing-packages/ -GUI Programming on the Mac -========================== +.. _gui-programming-on-the-mac: + +GUI Programming +=============== There are several options for building GUI applications on the Mac with Python. @@ -144,31 +152,50 @@ X by Apple, and the latest version can be downloaded and installed from https://www.activestate.com; it can also be built from source. -*wxPython* is another popular cross-platform GUI toolkit that runs natively on -macOS. Packages and documentation are available from https://www.wxpython.org. +A number of alternative macOS GUI toolkits are available: + +* `PySide `__: Official Python bindings to the + `Qt GUI toolkit `__. -*PyQt* is another popular cross-platform GUI toolkit that runs natively on -macOS. More information can be found at -https://riverbankcomputing.com/software/pyqt/intro. +* `PyQt `__: Alternative + Python bindings to Qt. +* `Kivy `__: A cross-platform GUI toolkit that supports + desktop and mobile platforms. -Distributing Python Applications on the Mac -=========================================== +* `Toga `__: Part of the `BeeWare Project + `__; supports desktop, mobile, web and console apps. -The standard tool for deploying standalone Python applications on the Mac is -:program:`py2app`. More information on installing and using py2app can be found -at https://pypi.org/project/py2app/. +* `wxPython `__: A cross-platform toolkit that + supports desktop operating systems. +.. _distributing-python-applications-on-the-mac: + +Distributing Python Applications +================================ + +A range of tools exist for converting your Python code into a standalone +distributable application: + +* `py2app `__: Supports creating macOS ``.app`` + bundles from a Python project. + +* `Briefcase `__: Part of the `BeeWare Project + `__; a cross-platform packaging tool that supports + creation of ``.app`` bundles on macOS, as well as managing signing and + notarization. + +* `PyInstaller `__: A cross-platform packaging tool that creates + a single file or folder as a distributable artifact. Other Resources =============== -The MacPython mailing list is an excellent support resource for Python users and -developers on the Mac: +The Pythonmac-SIG mailing list is an excellent support resource for Python users +and developers on the Mac: https://www.python.org/community/sigs/current/pythonmac-sig/ Another useful resource is the MacPython wiki: https://wiki.python.org/moin/MacPython - diff -Nru python3.11-3.11.8/Doc/using/venv-create.inc python3.11-3.11.9/Doc/using/venv-create.inc --- python3.11-3.11.8/Doc/using/venv-create.inc 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/using/venv-create.inc 2024-04-02 08:25:04.000000000 +0000 @@ -14,14 +14,14 @@ ``Lib\site-packages``). If an existing directory is specified, it will be re-used. +.. versionchanged:: 3.5 + The use of ``venv`` is now recommended for creating virtual environments. + .. 
deprecated:: 3.6 ``pyvenv`` was the recommended tool for creating virtual environments for Python 3.3 and 3.4, and is :ref:`deprecated in Python 3.6 `. -.. versionchanged:: 3.5 - The use of ``venv`` is now recommended for creating virtual environments. - .. highlight:: none On Windows, invoke the ``venv`` command as follows:: diff -Nru python3.11-3.11.8/Doc/using/windows.rst python3.11-3.11.9/Doc/using/windows.rst --- python3.11-3.11.8/Doc/using/windows.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/using/windows.rst 2024-04-02 08:25:04.000000000 +0000 @@ -14,8 +14,8 @@ Unlike most Unix systems and services, Windows does not include a system supported installation of Python. To make Python available, the CPython team -has compiled Windows installers (MSI packages) with every `release -`_ for many years. These installers +has compiled Windows installers with every `release +`_ for many years. These installers are primarily intended to add a per-user installation of Python, with the core interpreter and library being used by a single user. The installer is also able to install for all users of a single machine, and a separate ZIP file is diff -Nru python3.11-3.11.8/Doc/whatsnew/2.0.rst python3.11-3.11.9/Doc/whatsnew/2.0.rst --- python3.11-3.11.8/Doc/whatsnew/2.0.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/2.0.rst 2024-04-02 08:25:04.000000000 +0000 @@ -217,13 +217,13 @@ was consumed. * *stream_reader* is a class that supports decoding input from a stream. - *stream_reader(file_obj)* returns an object that supports the :meth:`read`, - :meth:`readline`, and :meth:`readlines` methods. These methods will all + *stream_reader(file_obj)* returns an object that supports the :meth:`!read`, + :meth:`!readline`, and :meth:`!readlines` methods. These methods will all translate from the given encoding and return Unicode strings. * *stream_writer*, similarly, is a class that supports encoding output to a stream. *stream_writer(file_obj)* returns an object that supports the - :meth:`write` and :meth:`writelines` methods. These methods expect Unicode + :meth:`!write` and :meth:`!writelines` methods. These methods expect Unicode strings, translating them to the given encoding on output. For example, the following code writes a Unicode string into a file, encoding @@ -356,8 +356,8 @@ The full list of supported assignment operators is ``+=``, ``-=``, ``*=``, ``/=``, ``%=``, ``**=``, ``&=``, ``|=``, ``^=``, ``>>=``, and ``<<=``. Python classes can override the augmented assignment operators by defining methods -named :meth:`__iadd__`, :meth:`__isub__`, etc. For example, the following -:class:`Number` class stores a number and supports using += to create a new +named :meth:`!__iadd__`, :meth:`!__isub__`, etc. For example, the following +:class:`!Number` class stores a number and supports using += to create a new instance with an incremented value. .. The empty groups below prevent conversion to guillemets. @@ -374,7 +374,7 @@ n += 3 print n.value -The :meth:`__iadd__` special method is called with the value of the increment, +The :meth:`!__iadd__` special method is called with the value of the increment, and should return a new instance with an appropriately modified value; this return value is bound as the new value of the variable on the left-hand side. @@ -390,10 +390,10 @@ ============== Until now string-manipulation functionality was in the :mod:`string` module, -which was usually a front-end for the :mod:`strop` module written in C. 
The -addition of Unicode posed a difficulty for the :mod:`strop` module, because the +which was usually a front-end for the :mod:`!strop` module written in C. The +addition of Unicode posed a difficulty for the :mod:`!strop` module, because the functions would all need to be rewritten in order to accept either 8-bit or -Unicode strings. For functions such as :func:`string.replace`, which takes 3 +Unicode strings. For functions such as :func:`!string.replace`, which takes 3 string arguments, that means eight possible permutations, and correspondingly complicated code. @@ -416,13 +416,13 @@ mostly acts as a front-end to the new string methods. Two methods which have no parallel in pre-2.0 versions, although they did exist -in JPython for quite some time, are :meth:`startswith` and :meth:`endswith`. +in JPython for quite some time, are :meth:`!startswith` and :meth:`!endswith`. ``s.startswith(t)`` is equivalent to ``s[:len(t)] == t``, while ``s.endswith(t)`` is equivalent to ``s[-len(t):] == t``. -One other method which deserves special mention is :meth:`join`. The -:meth:`join` method of a string receives one parameter, a sequence of strings, -and is equivalent to the :func:`string.join` function from the old :mod:`string` +One other method which deserves special mention is :meth:`!join`. The +:meth:`!join` method of a string receives one parameter, a sequence of strings, +and is equivalent to the :func:`!string.join` function from the old :mod:`string` module, with the arguments reversed. In other words, ``s.join(seq)`` is equivalent to the old ``string.join(seq, s)``. @@ -503,9 +503,9 @@ A new syntax makes it more convenient to call a given function with a tuple of arguments and/or a dictionary of keyword arguments. In Python 1.5 and earlier, -you'd use the :func:`apply` built-in function: ``apply(f, args, kw)`` calls the -function :func:`f` with the argument tuple *args* and the keyword arguments in -the dictionary *kw*. :func:`apply` is the same in 2.0, but thanks to a patch +you'd use the :func:`!apply` built-in function: ``apply(f, args, kw)`` calls the +function :func:`!f` with the argument tuple *args* and the keyword arguments in +the dictionary *kw*. :func:`!apply` is the same in 2.0, but thanks to a patch from Greg Ewing, ``f(*args, **kw)`` is a shorter and clearer way to achieve the same effect. This syntax is symmetrical with the syntax for defining functions:: @@ -518,7 +518,7 @@ The ``print`` statement can now have its output directed to a file-like object by following the ``print`` with ``>> file``, similar to the redirection operator in Unix shells. Previously you'd either have to use the -:meth:`write` method of the file-like object, which lacks the convenience and +:meth:`!write` method of the file-like object, which lacks the convenience and simplicity of ``print``, or you could assign a new value to ``sys.stdout`` and then restore the old value. For sending output to standard error, it's much easier to write this:: @@ -540,7 +540,7 @@ true if *obj* is present in the sequence *seq*; Python computes this by simply trying every index of the sequence until either *obj* is found or an :exc:`IndexError` is encountered. Moshe Zadka contributed a patch which adds a -:meth:`__contains__` magic method for providing a custom implementation for +:meth:`!__contains__` magic method for providing a custom implementation for :keyword:`!in`. Additionally, new built-in objects written in C can define what :keyword:`!in` means for them via a new slot in the sequence protocol. 
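As a small, assumed sketch of the ``__contains__`` hook described above (illustrative only, not taken from the patch), a class can give ``in`` its own meaning without supporting indexing at all::

    class EvenNumbers:
        """Membership testing via __contains__: no indices are scanned."""

        def __contains__(self, item):
            # Called by the `in` operator for `item in EvenNumbers()`.
            return isinstance(item, int) and item % 2 == 0

    evens = EvenNumbers()
    print(2 in evens)   # True
    print(7 in evens)   # False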
@@ -562,7 +562,7 @@ implementation, and some useful relevant links. Note that comparisons can now also raise exceptions. In earlier versions of Python, a comparison operation such as ``cmp(a,b)`` would always produce an answer, even if a user-defined -:meth:`__cmp__` method encountered an error, since the resulting exception would +:meth:`!__cmp__` method encountered an error, since the resulting exception would simply be silently swallowed. .. Starting URL: @@ -607,7 +607,7 @@ sequences aren't all of the same length, while :func:`zip` truncates the returned list to the length of the shortest argument sequence. -The :func:`int` and :func:`long` functions now accept an optional "base" +The :func:`int` and :func:`!long` functions now accept an optional "base" parameter when the first argument is a string. ``int('123', 10)`` returns 123, while ``int('123', 16)`` returns 291. ``int(123, 16)`` raises a :exc:`TypeError` exception with the message "can't convert non-string with @@ -620,8 +620,8 @@ ``"beta"``, or ``"final"`` for a final release. Dictionaries have an odd new method, ``setdefault(key, default)``, which -behaves similarly to the existing :meth:`get` method. However, if the key is -missing, :meth:`setdefault` both returns the value of *default* as :meth:`get` +behaves similarly to the existing :meth:`!get` method. However, if the key is +missing, :meth:`!setdefault` both returns the value of *default* as :meth:`!get` would do, and also inserts it into the dictionary as the value for *key*. Thus, the following lines of code:: @@ -656,7 +656,7 @@ The change which will probably break the most code is tightening up the arguments accepted by some methods. Some methods would take multiple arguments and treat them as a tuple, particularly various list methods such as -:meth:`append` and :meth:`insert`. In earlier versions of Python, if ``L`` is +:meth:`!append` and :meth:`!insert`. In earlier versions of Python, if ``L`` is a list, ``L.append( 1,2 )`` appends the tuple ``(1,2)`` to the list. In Python 2.0 this causes a :exc:`TypeError` exception to be raised, with the message: 'append requires exactly 1 argument; 2 given'. The fix is to simply add an @@ -693,7 +693,7 @@ Some work has been done to make integers and long integers a bit more interchangeable. In 1.5.2, large-file support was added for Solaris, to allow -reading files larger than 2 GiB; this made the :meth:`tell` method of file +reading files larger than 2 GiB; this made the :meth:`!tell` method of file objects return a long integer instead of a regular integer. Some code would subtract two file offsets and attempt to use the result to multiply a sequence or slice a string, but this raised a :exc:`TypeError`. In 2.0, long integers @@ -701,7 +701,7 @@ intuitively expect it to; ``3L * 'abc'`` produces 'abcabcabc', and ``(0,1,2,3)[2L:4L]`` produces (2,3). Long integers can also be used in various contexts where previously only integers were accepted, such as in the -:meth:`seek` method of file objects, and in the formats supported by the ``%`` +:meth:`!seek` method of file objects, and in the formats supported by the ``%`` operator (``%d``, ``%i``, ``%x``, etc.). For example, ``"%d" % 2L**64`` will produce the string ``18446744073709551616``. @@ -715,7 +715,7 @@ Taking the :func:`repr` of a float now uses a different formatting precision than :func:`str`. :func:`repr` uses ``%.17g`` format string for C's -:func:`sprintf`, while :func:`str` uses ``%.12g`` as before. 
The effect is that +:func:`!sprintf`, while :func:`str` uses ``%.12g`` as before. The effect is that :func:`repr` may occasionally show more decimal places than :func:`str`, for certain numbers. For example, the number 8.1 can't be represented exactly in binary, so ``repr(8.1)`` is ``'8.0999999999999996'``, while str(8.1) is @@ -723,7 +723,7 @@ The ``-X`` command-line option, which turned all standard exceptions into strings instead of classes, has been removed; the standard exceptions will now -always be classes. The :mod:`exceptions` module containing the standard +always be classes. The :mod:`!exceptions` module containing the standard exceptions was translated from Python to a built-in C module, written by Barry Warsaw and Fredrik Lundh. @@ -879,11 +879,11 @@ XML Modules =========== -Python 1.5.2 included a simple XML parser in the form of the :mod:`xmllib` +Python 1.5.2 included a simple XML parser in the form of the :mod:`!xmllib` module, contributed by Sjoerd Mullender. Since 1.5.2's release, two different interfaces for processing XML have become common: SAX2 (version 2 of the Simple API for XML) provides an event-driven interface with some similarities to -:mod:`xmllib`, and the DOM (Document Object Model) provides a tree-based +:mod:`!xmllib`, and the DOM (Document Object Model) provides a tree-based interface, transforming an XML document into a tree of nodes that can be traversed and modified. Python 2.0 includes a SAX2 interface and a stripped-down DOM interface as part of the :mod:`xml` package. Here we will give a brief @@ -898,9 +898,9 @@ SAX defines an event-driven interface for parsing XML. To use SAX, you must write a SAX handler class. Handler classes inherit from various classes provided by SAX, and override various methods that will then be called by the -XML parser. For example, the :meth:`startElement` and :meth:`endElement` +XML parser. For example, the :meth:`~xml.sax.handler.ContentHandler.startElement` and :meth:`~xml.sax.handler.ContentHandler.endElement` methods are called for every starting and end tag encountered by the parser, the -:meth:`characters` method is called for every chunk of character data, and so +:meth:`~xml.sax.handler.ContentHandler.characters` method is called for every chunk of character data, and so forth. The advantage of the event-driven approach is that the whole document doesn't @@ -940,8 +940,8 @@ ----------- The Document Object Model is a tree-based representation for an XML document. A -top-level :class:`Document` instance is the root of the tree, and has a single -child which is the top-level :class:`Element` instance. This :class:`Element` +top-level :class:`!Document` instance is the root of the tree, and has a single +child which is the top-level :class:`!Element` instance. This :class:`!Element` has children nodes representing character data and any sub-elements, which may have further children of their own, and so forth. Using the DOM you can traverse the resulting tree any way you like, access element and attribute @@ -955,18 +955,18 @@ The DOM implementation included with Python lives in the :mod:`xml.dom.minidom` module. It's a lightweight implementation of the Level 1 DOM with support for -XML namespaces. The :func:`parse` and :func:`parseString` convenience +XML namespaces. The :func:`!parse` and :func:`!parseString` convenience functions are provided for generating a DOM tree:: from xml.dom import minidom doc = minidom.parse('hamlet.xml') -``doc`` is a :class:`Document` instance. 
:class:`Document`, like all the other -DOM classes such as :class:`Element` and :class:`Text`, is a subclass of the -:class:`Node` base class. All the nodes in a DOM tree therefore support certain -common methods, such as :meth:`toxml` which returns a string containing the XML +``doc`` is a :class:`!Document` instance. :class:`!Document`, like all the other +DOM classes such as :class:`!Element` and :class:`Text`, is a subclass of the +:class:`!Node` base class. All the nodes in a DOM tree therefore support certain +common methods, such as :meth:`!toxml` which returns a string containing the XML representation of the node and its children. Each class also has special -methods of its own; for example, :class:`Element` and :class:`Document` +methods of its own; for example, :class:`!Element` and :class:`!Document` instances have a method to find all child elements with a given tag name. Continuing from the previous 2-line example:: @@ -995,7 +995,7 @@ root.insertBefore( root.childNodes[0], root.childNodes[20] ) Again, I will refer you to the Python documentation for a complete listing of -the different :class:`Node` classes and their various methods. +the different :class:`!Node` classes and their various methods. Relationship to PyXML @@ -1020,7 +1020,7 @@ * The xmlproc validating parser, written by Lars Marius Garshol. -* The :mod:`sgmlop` parser accelerator module, written by Fredrik Lundh. +* The :mod:`!sgmlop` parser accelerator module, written by Fredrik Lundh. .. ====================================================================== @@ -1031,7 +1031,7 @@ Lots of improvements and bugfixes were made to Python's extensive standard library; some of the affected modules include :mod:`readline`, :mod:`ConfigParser `, :mod:`cgi`, :mod:`calendar`, :mod:`posix`, :mod:`readline`, -:mod:`xmllib`, :mod:`aifc`, :mod:`chunk, wave`, :mod:`random`, :mod:`shelve`, +:mod:`!xmllib`, :mod:`aifc`, :mod:`chunk`, :mod:`wave`, :mod:`random`, :mod:`shelve`, and :mod:`nntplib`. Consult the CVS logs for the exact patch-by-patch details. Brian Gallew contributed OpenSSL support for the :mod:`socket` module. OpenSSL @@ -1044,11 +1044,12 @@ FTP or SMTP over SSL. The :mod:`httplib ` module has been rewritten by Greg Stein to support HTTP/1.1. + Backward compatibility with the 1.5 version of :mod:`!httplib` is provided, though using HTTP/1.1 features such as pipelining will require rewriting code to use a different set of interfaces. -The :mod:`Tkinter` module now supports Tcl/Tk version 8.1, 8.2, or 8.3, and +The :mod:`!Tkinter` module now supports Tcl/Tk version 8.1, 8.2, or 8.3, and support for the older 7.x versions has been dropped. The Tkinter module now supports displaying Unicode strings in Tk widgets. Also, Fredrik Lundh contributed an optimization which makes operations like ``create_line`` and @@ -1083,11 +1084,11 @@ calling :func:`atexit.register` with the function to be called on exit. (Contributed by Skip Montanaro.) -* :mod:`codecs`, :mod:`encodings`, :mod:`unicodedata`: Added as part of the new +* :mod:`codecs`, :mod:`!encodings`, :mod:`unicodedata`: Added as part of the new Unicode support. -* :mod:`filecmp`: Supersedes the old :mod:`cmp`, :mod:`cmpcache` and - :mod:`dircmp` modules, which have now become deprecated. (Contributed by Gordon +* :mod:`filecmp`: Supersedes the old :mod:`!cmp`, :mod:`!cmpcache` and + :mod:`!dircmp` modules, which have now become deprecated. (Contributed by Gordon MacMillan and Moshe Zadka.) 
* :mod:`gettext`: This module provides internationalization (I18N) and @@ -1105,7 +1106,7 @@ be passed to functions that expect ordinary strings, such as the :mod:`re` module. (Contributed by Sam Rushing, with some extensions by A.M. Kuchling.) -* :mod:`pyexpat`: An interface to the Expat XML parser. (Contributed by Paul +* :mod:`!pyexpat`: An interface to the Expat XML parser. (Contributed by Paul Prescod.) * :mod:`robotparser `: Parse a :file:`robots.txt` file, which is used for writing @@ -1117,7 +1118,7 @@ * :mod:`tabnanny`: A module/script to check Python source code for ambiguous indentation. (Contributed by Tim Peters.) -* :mod:`UserString`: A base class useful for deriving objects that behave like +* :mod:`!UserString`: A base class useful for deriving objects that behave like strings. * :mod:`webbrowser`: A module that provides a platform independent way to launch @@ -1184,13 +1185,13 @@ ============================== A few modules have been dropped because they're obsolete, or because there are -now better ways to do the same thing. The :mod:`stdwin` module is gone; it was +now better ways to do the same thing. The :mod:`!stdwin` module is gone; it was for a platform-independent windowing toolkit that's no longer developed. A number of modules have been moved to the :file:`lib-old` subdirectory: -:mod:`cmp`, :mod:`cmpcache`, :mod:`dircmp`, :mod:`dump`, :mod:`find`, -:mod:`grep`, :mod:`packmail`, :mod:`poly`, :mod:`util`, :mod:`whatsound`, -:mod:`zmod`. If you have code which relies on a module that's been moved to +:mod:`!cmp`, :mod:`!cmpcache`, :mod:`!dircmp`, :mod:`!dump`, :mod:`!find`, +:mod:`!grep`, :mod:`!packmail`, :mod:`!poly`, :mod:`!util`, :mod:`!whatsound`, +:mod:`!zmod`. If you have code which relies on a module that's been moved to :file:`lib-old`, you can simply add that directory to ``sys.path`` to get them back, but you're encouraged to update any code that uses these modules. diff -Nru python3.11-3.11.8/Doc/whatsnew/2.1.rst python3.11-3.11.9/Doc/whatsnew/2.1.rst --- python3.11-3.11.8/Doc/whatsnew/2.1.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/2.1.rst 2024-04-02 08:25:04.000000000 +0000 @@ -48,7 +48,7 @@ return g(value-1) + 1 ... -The function :func:`g` will always raise a :exc:`NameError` exception, because +The function :func:`!g` will always raise a :exc:`NameError` exception, because the binding of the name ``g`` isn't in either its local namespace or in the module-level namespace. This isn't much of a problem in practice (how often do you recursively define interior functions like this?), but this also made using @@ -104,7 +104,7 @@ Line 4 containing the ``exec`` statement is a syntax error, since ``exec`` would define a new local variable named ``x`` whose value should -be accessed by :func:`g`. +be accessed by :func:`!g`. This shouldn't be much of a limitation, since ``exec`` is rarely used in most Python code (and when it is used, it's often a sign of a poor design @@ -161,7 +161,7 @@ In earlier versions, Python's support for implementing comparisons on user-defined classes and extension types was quite simple. Classes could implement a -:meth:`__cmp__` method that was given two instances of a class, and could only +:meth:`!__cmp__` method that was given two instances of a class, and could only return 0 if they were equal or +1 or -1 if they weren't; the method couldn't raise an exception or return anything other than a Boolean value. 
Users of Numeric Python often found this model too weak and restrictive, because in the @@ -175,21 +175,21 @@ Python classes can now individually overload each of the ``<``, ``<=``, ``>``, ``>=``, ``==``, and ``!=`` operations. The new magic method names are: -+-----------+----------------+ -| Operation | Method name | -+===========+================+ -| ``<`` | :meth:`__lt__` | -+-----------+----------------+ -| ``<=`` | :meth:`__le__` | -+-----------+----------------+ -| ``>`` | :meth:`__gt__` | -+-----------+----------------+ -| ``>=`` | :meth:`__ge__` | -+-----------+----------------+ -| ``==`` | :meth:`__eq__` | -+-----------+----------------+ -| ``!=`` | :meth:`__ne__` | -+-----------+----------------+ ++-----------+------------------------+ +| Operation | Method name | ++===========+========================+ +| ``<`` | :meth:`~object.__lt__` | ++-----------+------------------------+ +| ``<=`` | :meth:`~object.__le__` | ++-----------+------------------------+ +| ``>`` | :meth:`~object.__gt__` | ++-----------+------------------------+ +| ``>=`` | :meth:`~object.__ge__` | ++-----------+------------------------+ +| ``==`` | :meth:`~object.__eq__` | ++-----------+------------------------+ +| ``!=`` | :meth:`~object.__ne__` | ++-----------+------------------------+ (The magic methods are named after the corresponding Fortran operators ``.LT.``. ``.LE.``, &c. Numeric programmers are almost certainly quite familiar with @@ -208,7 +208,7 @@ and now accepts an optional argument specifying which comparison operation to use; this is given as one of the strings ``"<"``, ``"<="``, ``">"``, ``">="``, ``"=="``, or ``"!="``. If called without the optional third argument, -:func:`cmp` will only return -1, 0, or +1 as in previous versions of Python; +:func:`!cmp` will only return -1, 0, or +1 as in previous versions of Python; otherwise it will call the appropriate method and can return any Python object. There are also corresponding changes of interest to C programmers; there's a new @@ -245,7 +245,7 @@ use this framework to deprecate old features that they no longer wish to support. -For example, in Python 2.1 the :mod:`regex` module is deprecated, so importing +For example, in Python 2.1 the :mod:`!regex` module is deprecated, so importing it causes a warning to be printed:: >>> import regex @@ -262,7 +262,7 @@ Filters can be added to disable certain warnings; a regular expression pattern can be applied to the message or to the module name in order to suppress a -warning. For example, you may have a program that uses the :mod:`regex` module +warning. For example, you may have a program that uses the :mod:`!regex` module and not want to spare the time to convert it to use the :mod:`re` module right now. The warning can be suppressed by calling :: @@ -274,7 +274,7 @@ This adds a filter that will apply only to warnings of the class :class:`DeprecationWarning` triggered in the :mod:`__main__` module, and applies -a regular expression to only match the message about the :mod:`regex` module +a regular expression to only match the message about the :mod:`!regex` module being deprecated, and will cause such warnings to be ignored. 
Warnings can also be printed only once, printed every time the offending code is executed, or turned into exceptions that will cause the program to stop (unless the @@ -368,7 +368,7 @@ This version works for simple things such as integers, but it has a side effect; the ``_cache`` dictionary holds a reference to the return values, so they'll never be deallocated until the Python process exits and cleans up. This isn't -very noticeable for integers, but if :func:`f` returns an object, or a data +very noticeable for integers, but if :func:`!f` returns an object, or a data structure that takes up a lot of memory, this can be a problem. Weak references provide a way to implement a cache that won't keep objects alive @@ -379,7 +379,7 @@ returned by calling the weak reference as if it were a function: ``wr()``. It will return the referenced object, or ``None`` if the object no longer exists. -This makes it possible to write a :func:`memoize` function whose cache doesn't +This makes it possible to write a :func:`!memoize` function whose cache doesn't keep objects alive, by storing weak references in the cache. :: _cache = {} @@ -402,7 +402,7 @@ but instead of requiring an explicit call to retrieve the object, the proxy transparently forwards all operations to the object as long as the object still exists. If the object is deallocated, attempting to use a proxy will cause a -:exc:`weakref.ReferenceError` exception to be raised. :: +:exc:`!weakref.ReferenceError` exception to be raised. :: proxy = weakref.proxy(obj) proxy.attr # Equivalent to obj.attr @@ -446,7 +446,7 @@ :attr:`~object.__dict__`. Unlike the :attr:`~object.__dict__` attribute of class instances, in functions you can actually assign a new dictionary to :attr:`~object.__dict__`, though the new value is restricted to a regular Python dictionary; you *can't* be -tricky and set it to a :class:`UserDict` instance, or any other random object +tricky and set it to a :class:`!UserDict` instance, or any other random object that behaves like a mapping. @@ -584,11 +584,11 @@ New and Improved Modules ======================== -* Ka-Ping Yee contributed two new modules: :mod:`inspect.py`, a module for - getting information about live Python code, and :mod:`pydoc.py`, a module for +* Ka-Ping Yee contributed two new modules: :mod:`!inspect.py`, a module for + getting information about live Python code, and :mod:`!pydoc.py`, a module for interactively converting docstrings to HTML or text. As a bonus, :file:`Tools/scripts/pydoc`, which is now automatically installed, uses - :mod:`pydoc.py` to display documentation given a Python module, package, or + :mod:`!pydoc.py` to display documentation given a Python module, package, or class name. For example, ``pydoc xml.dom`` displays the following:: Python Library Documentation: package xml.dom in xml @@ -617,7 +617,7 @@ Kent Beck's Smalltalk testing framework. See https://pyunit.sourceforge.net/ for more information about PyUnit. -* The :mod:`difflib` module contains a class, :class:`SequenceMatcher`, which +* The :mod:`difflib` module contains a class, :class:`~difflib.SequenceMatcher`, which compares two sequences and computes the changes required to transform one sequence into the other. For example, this module can be used to write a tool similar to the Unix :program:`diff` program, and in fact the sample program @@ -633,7 +633,7 @@ 2.1 includes an updated version of the :mod:`xml` package. 
Some of the noteworthy changes include support for Expat 1.2 and later versions, the ability for Expat parsers to handle files in any encoding supported by Python, and - various bugfixes for SAX, DOM, and the :mod:`minidom` module. + various bugfixes for SAX, DOM, and the :mod:`!minidom` module. * Ping also contributed another hook for handling uncaught exceptions. :func:`sys.excepthook` can be set to a callable object. When an exception isn't @@ -643,8 +643,8 @@ printing an extended traceback that not only lists the stack frames, but also lists the function arguments and the local variables for each frame. -* Various functions in the :mod:`time` module, such as :func:`asctime` and - :func:`localtime`, require a floating point argument containing the time in +* Various functions in the :mod:`time` module, such as :func:`~time.asctime` and + :func:`~time.localtime`, require a floating point argument containing the time in seconds since the epoch. The most common use of these functions is to work with the current time, so the floating point argument has been made optional; when a value isn't provided, the current time will be used. For example, log file @@ -724,10 +724,10 @@ a discussion in comp.lang.python. A new module and method for file objects was also added, contributed by Jeff - Epler. The new method, :meth:`xreadlines`, is similar to the existing - :func:`xrange` built-in. :func:`xreadlines` returns an opaque sequence object + Epler. The new method, :meth:`!xreadlines`, is similar to the existing + :func:`!xrange` built-in. :func:`!xreadlines` returns an opaque sequence object that only supports being iterated over, reading a line on every iteration but - not reading the entire file into memory as the existing :meth:`readlines` method + not reading the entire file into memory as the existing :meth:`!readlines` method does. You'd use it like this:: for line in sys.stdin.xreadlines(): @@ -737,7 +737,7 @@ For a fuller discussion of the line I/O changes, see the python-dev summary for January 1--15, 2001 at https://mail.python.org/pipermail/python-dev/2001-January/. -* A new method, :meth:`popitem`, was added to dictionaries to enable +* A new method, :meth:`~dict.popitem`, was added to dictionaries to enable destructively iterating through the contents of a dictionary; this can be faster for large dictionaries because there's no need to construct a list containing all the keys or values. ``D.popitem()`` removes a random ``(key, value)`` pair diff -Nru python3.11-3.11.8/Doc/whatsnew/2.6.rst python3.11-3.11.9/Doc/whatsnew/2.6.rst --- python3.11-3.11.8/Doc/whatsnew/2.6.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/2.6.rst 2024-04-02 08:25:04.000000000 +0000 @@ -2388,11 +2388,11 @@ using the format character ``'?'``. (Contributed by David Remahl.) -* The :class:`Popen` objects provided by the :mod:`subprocess` module - now have :meth:`terminate`, :meth:`kill`, and :meth:`send_signal` methods. - On Windows, :meth:`send_signal` only supports the :const:`SIGTERM` +* The :class:`~subprocess.Popen` objects provided by the :mod:`subprocess` module + now have :meth:`~subprocess.Popen.terminate`, :meth:`~subprocess.Popen.kill`, and :meth:`~subprocess.Popen.send_signal` methods. + On Windows, :meth:`!send_signal` only supports the :py:const:`~signal.SIGTERM` signal, and all these methods are aliases for the Win32 API function - :c:func:`TerminateProcess`. + :c:func:`!TerminateProcess`. (Contributed by Christian Heimes.) 
* A new variable in the :mod:`sys` module, :attr:`float_info`, is an @@ -2992,6 +2992,33 @@ architectures (x86, PowerPC), 64-bit (x86-64 and PPC-64), or both. (Contributed by Ronald Oussoren.) +* A new function added in Python 2.6.6, :c:func:`!PySys_SetArgvEx`, sets + the value of ``sys.argv`` and can optionally update ``sys.path`` to + include the directory containing the script named by ``sys.argv[0]`` + depending on the value of an *updatepath* parameter. + + This function was added to close a security hole for applications + that embed Python. The old function, :c:func:`!PySys_SetArgv`, would + always update ``sys.path``, and sometimes it would add the current + directory. This meant that, if you ran an application embedding + Python in a directory controlled by someone else, attackers could + put a Trojan-horse module in the directory (say, a file named + :file:`os.py`) that your application would then import and run. + + If you maintain a C/C++ application that embeds Python, check + whether you're calling :c:func:`!PySys_SetArgv` and carefully consider + whether the application should be using :c:func:`!PySys_SetArgvEx` + with *updatepath* set to false. Note that using this function will + break compatibility with Python versions 2.6.5 and earlier; if you + have to continue working with earlier versions, you can leave + the call to :c:func:`!PySys_SetArgv` alone and call + ``PyRun_SimpleString("sys.path.pop(0)\n")`` afterwards to discard + the first ``sys.path`` component. + + Security issue reported as `CVE-2008-5983 + `_; + discussed in :gh:`50003`, and fixed by Antoine Pitrou. + * The BerkeleyDB module now has a C API object, available as ``bsddb.db.api``. This object can be used by other C extensions that wish to use the :mod:`bsddb` module for their own purposes. @@ -3294,6 +3321,15 @@ scoping rules, also cause warnings because such comparisons are forbidden entirely in 3.0. +For applications that embed Python: + +* The :c:func:`!PySys_SetArgvEx` function was added in Python 2.6.6, + letting applications close a security hole when the existing + :c:func:`!PySys_SetArgv` function was used. Check whether you're + calling :c:func:`!PySys_SetArgv` and carefully consider whether the + application should be using :c:func:`!PySys_SetArgvEx` with + *updatepath* set to false. + .. ====================================================================== diff -Nru python3.11-3.11.8/Doc/whatsnew/2.7.rst python3.11-3.11.9/Doc/whatsnew/2.7.rst --- python3.11-3.11.8/Doc/whatsnew/2.7.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/2.7.rst 2024-04-02 08:25:04.000000000 +0000 @@ -196,7 +196,7 @@ Other new Python3-mode warnings include: -* :func:`operator.isCallable` and :func:`operator.sequenceIncludes`, +* :func:`!operator.isCallable` and :func:`!operator.sequenceIncludes`, which are not supported in 3.x, now trigger warnings. * The :option:`!-3` switch now automatically enables the :option:`!-Qwarn` switch that causes warnings @@ -455,11 +455,11 @@ All this flexibility can require a lot of configuration. You can write Python statements to create objects and set their properties, but a complex set-up requires verbose but boring code. -:mod:`logging` also supports a :func:`~logging.fileConfig` +:mod:`logging` also supports a :func:`~logging.config.fileConfig` function that parses a file, but the file format doesn't support configuring filters, and it's messier to generate programmatically. 
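The "verbose but boring code" referred to above is ordinary handler and formatter wiring. A hedged sketch of such purely programmatic configuration (the logger name ``"app.network"`` is hypothetical) looks like this::

    import logging

    # Create each object and wire it together by hand.
    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s"))

    logger = logging.getLogger("app.network")   # hypothetical logger name
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)

    logger.info("listener started")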
-Python 2.7 adds a :func:`~logging.dictConfig` function that +Python 2.7 adds a :func:`~logging.config.dictConfig` function that uses a dictionary to configure logging. There are many ways to produce a dictionary from different sources: construct one with code; parse a file containing JSON; or use a YAML parsing library if one is @@ -533,7 +533,7 @@ ``getLogger('app.network.listen')``. * The :class:`~logging.LoggerAdapter` class gained an - :meth:`~logging.LoggerAdapter.isEnabledFor` method that takes a + :meth:`~logging.Logger.isEnabledFor` method that takes a *level* and returns whether the underlying logger would process a message of that level of importance. @@ -554,8 +554,8 @@ It's not possible to change the return values of :meth:`~dict.keys`, :meth:`~dict.values`, and :meth:`~dict.items` in Python 2.7 because too much code would break. Instead the 3.x versions were added -under the new names :meth:`~dict.viewkeys`, :meth:`~dict.viewvalues`, -and :meth:`~dict.viewitems`. +under the new names :meth:`!viewkeys`, :meth:`!viewvalues`, +and :meth:`!viewitems`. :: @@ -720,7 +720,7 @@ with B() as b: ... suite of statements ... - The :func:`contextlib.nested` function provides a very similar + The :func:`!contextlib.nested` function provides a very similar function, so it's no longer necessary and has been deprecated. (Proposed in https://codereview.appspot.com/53094; implemented by @@ -785,7 +785,7 @@ implemented by Mark Dickinson; :issue:`1811`.) * Implicit coercion for complex numbers has been removed; the interpreter - will no longer ever attempt to call a :meth:`__coerce__` method on complex + will no longer ever attempt to call a :meth:`!__coerce__` method on complex objects. (Removed by Meador Inge and Mark Dickinson; :issue:`5211`.) * The :meth:`str.format` method now supports automatic numbering of the replacement @@ -817,7 +817,7 @@ A low-level change: the :meth:`object.__format__` method now triggers a :exc:`PendingDeprecationWarning` if it's passed a format string, - because the :meth:`__format__` method for :class:`object` converts + because the :meth:`!__format__` method for :class:`object` converts the object to a string representation and formats that. Previously the method silently applied the format string to the string representation, but that could hide mistakes in Python code. If @@ -825,7 +825,7 @@ precision, presumably you're expecting the formatting to be applied in some object-specific way. (Fixed by Eric Smith; :issue:`7994`.) -* The :func:`int` and :func:`long` types gained a ``bit_length`` +* The :func:`int` and :func:`!long` types gained a ``bit_length`` method that returns the number of bits necessary to represent its argument in binary:: @@ -848,8 +848,8 @@ statements that were only working by accident. (Fixed by Meador Inge; :issue:`7902`.) -* It's now possible for a subclass of the built-in :class:`unicode` type - to override the :meth:`__unicode__` method. (Implemented by +* It's now possible for a subclass of the built-in :class:`!unicode` type + to override the :meth:`!__unicode__` method. (Implemented by Victor Stinner; :issue:`1583863`.) * The :class:`bytearray` type's :meth:`~bytearray.translate` method now accepts @@ -876,7 +876,7 @@ Forgeot d'Arc in :issue:`1616979`; CP858 contributed by Tim Hatch in :issue:`8016`.) 
-* The :class:`file` object will now set the :attr:`filename` attribute +* The :class:`!file` object will now set the :attr:`!filename` attribute on the :exc:`IOError` exception when trying to open a directory on POSIX platforms (noted by Jan Kaliszewski; :issue:`4764`), and now explicitly checks for and forbids writing to read-only file objects @@ -966,7 +966,7 @@ Apart from the performance improvements this change should be invisible to end users, with one exception: for testing and - debugging purposes there's a new structseq :data:`sys.long_info` that + debugging purposes there's a new structseq :data:`!sys.long_info` that provides information about the internal format, giving the number of bits per digit and the size in bytes of the C type used to store each digit:: @@ -1005,8 +1005,8 @@ conversion function that supports arbitrary bases. (Patch by Gawain Bolton; :issue:`6713`.) -* The :meth:`split`, :meth:`replace`, :meth:`rindex`, - :meth:`rpartition`, and :meth:`rsplit` methods of string-like types +* The :meth:`!split`, :meth:`!replace`, :meth:`!rindex`, + :meth:`!rpartition`, and :meth:`!rsplit` methods of string-like types (strings, Unicode strings, and :class:`bytearray` objects) now use a fast reverse-search algorithm instead of a character-by-character scan. This is sometimes faster by a factor of 10. (Added by @@ -1044,7 +1044,7 @@ used with :class:`memoryview` instances and other similar buffer objects. (Backported from 3.x by Florent Xicluna; :issue:`7703`.) -* Updated module: the :mod:`bsddb` module has been updated from 4.7.2devel9 +* Updated module: the :mod:`!bsddb` module has been updated from 4.7.2devel9 to version 4.8.4 of `the pybsddb package `__. The new version features better Python 3.x compatibility, various bug fixes, @@ -1129,8 +1129,8 @@ (Added by Raymond Hettinger; :issue:`1818`.) - Finally, the :class:`~collections.Mapping` abstract base class now - returns :const:`NotImplemented` if a mapping is compared to + Finally, the :class:`~collections.abc.Mapping` abstract base class now + returns :data:`NotImplemented` if a mapping is compared to another type that isn't a :class:`Mapping`. (Fixed by Daniel Stutzbach; :issue:`8729`.) @@ -1158,7 +1158,7 @@ (Contributed by Mats Kindahl; :issue:`7005`.) -* Deprecated function: :func:`contextlib.nested`, which allows +* Deprecated function: :func:`!contextlib.nested`, which allows handling more than one context manager with a single :keyword:`with` statement, has been deprecated, because the :keyword:`!with` statement now supports multiple context managers. @@ -1184,7 +1184,7 @@ * New method: the :class:`~decimal.Decimal` class gained a :meth:`~decimal.Decimal.from_float` class method that performs an exact - conversion of a floating-point number to a :class:`~decimal.Decimal`. + conversion of a floating-point number to a :class:`!Decimal`. This exact conversion strives for the closest decimal approximation to the floating-point representation's value; the resulting decimal value will therefore still include the inaccuracy, @@ -1198,9 +1198,9 @@ of the operands. Previously such comparisons would fall back to Python's default rules for comparing objects, which produced arbitrary results based on their type. Note that you still cannot combine - :class:`Decimal` and floating-point in other operations such as addition, + :class:`!Decimal` and floating-point in other operations such as addition, since you should be explicitly choosing how to convert between float and - :class:`~decimal.Decimal`. 
(Fixed by Mark Dickinson; :issue:`2531`.) + :class:`!Decimal`. (Fixed by Mark Dickinson; :issue:`2531`.) The constructor for :class:`~decimal.Decimal` now accepts floating-point numbers (added by Raymond Hettinger; :issue:`8257`) @@ -1218,7 +1218,7 @@ more sensible for numeric types. (Changed by Mark Dickinson; :issue:`6857`.) Comparisons involving a signaling NaN value (or ``sNAN``) now signal - :const:`InvalidOperation` instead of silently returning a true or + :const:`~decimal.InvalidOperation` instead of silently returning a true or false value depending on the comparison operator. Quiet NaN values (or ``NaN``) are now hashable. (Fixed by Mark Dickinson; :issue:`7279`.) @@ -1235,13 +1235,13 @@ created some new files that should be included. (Fixed by Tarek Ziadé; :issue:`8688`.) -* The :mod:`doctest` module's :const:`IGNORE_EXCEPTION_DETAIL` flag +* The :mod:`doctest` module's :const:`~doctest.IGNORE_EXCEPTION_DETAIL` flag will now ignore the name of the module containing the exception being tested. (Patch by Lennart Regebro; :issue:`7490`.) * The :mod:`email` module's :class:`~email.message.Message` class will now accept a Unicode-valued payload, automatically converting the - payload to the encoding specified by :attr:`output_charset`. + payload to the encoding specified by :attr:`!output_charset`. (Added by R. David Murray; :issue:`1368247`.) * The :class:`~fractions.Fraction` class now accepts a single float or @@ -1268,10 +1268,10 @@ :issue:`6845`.) * New class decorator: :func:`~functools.total_ordering` in the :mod:`functools` - module takes a class that defines an :meth:`__eq__` method and one of - :meth:`__lt__`, :meth:`__le__`, :meth:`__gt__`, or :meth:`__ge__`, + module takes a class that defines an :meth:`~object.__eq__` method and one of + :meth:`~object.__lt__`, :meth:`~object.__le__`, :meth:`~object.__gt__`, or :meth:`~object.__ge__`, and generates the missing comparison methods. Since the - :meth:`__cmp__` method is being deprecated in Python 3.x, + :meth:`!__cmp__` method is being deprecated in Python 3.x, this decorator makes it easier to define ordered classes. (Added by Raymond Hettinger; :issue:`5479`.) @@ -1300,7 +1300,7 @@ :mod:`gzip` module will now consume these trailing bytes. (Fixed by Tadek Pietraszek and Brian Curtin; :issue:`2846`.) -* New attribute: the :mod:`hashlib` module now has an :attr:`~hashlib.hashlib.algorithms` +* New attribute: the :mod:`hashlib` module now has an :attr:`!algorithms` attribute containing a tuple naming the supported algorithms. In Python 2.7, ``hashlib.algorithms`` contains ``('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')``. @@ -1347,10 +1347,10 @@ * Updated module: The :mod:`io` library has been upgraded to the version shipped with Python 3.1. For 3.1, the I/O library was entirely rewritten in C and is 2 to 20 times faster depending on the task being performed. The - original Python version was renamed to the :mod:`_pyio` module. + original Python version was renamed to the :mod:`!_pyio` module. One minor resulting change: the :class:`io.TextIOBase` class now - has an :attr:`errors` attribute giving the error setting + has an :attr:`~io.TextIOBase.errors` attribute giving the error setting used for encoding and decoding errors (one of ``'strict'``, ``'replace'``, ``'ignore'``). @@ -1422,10 +1422,10 @@ passed to the callable. (Contributed by lekma; :issue:`5585`.) 
- The :class:`~multiprocessing.Pool` class, which controls a pool of worker processes, + The :class:`~multiprocessing.pool.Pool` class, which controls a pool of worker processes, now has an optional *maxtasksperchild* parameter. Worker processes will perform the specified number of tasks and then exit, causing the - :class:`~multiprocessing.Pool` to start a new worker. This is useful if tasks may leak + :class:`!Pool` to start a new worker. This is useful if tasks may leak memory or other resources, or if some tasks will cause the worker to become very large. (Contributed by Charles Cazabon; :issue:`6963`.) @@ -1497,7 +1497,7 @@ global site-packages directories, :func:`~site.getusersitepackages` returns the path of the user's site-packages directory, and - :func:`~site.getuserbase` returns the value of the :envvar:`USER_BASE` + :func:`~site.getuserbase` returns the value of the :data:`~site.USER_BASE` environment variable, giving the path to a directory that can be used to store data. (Contributed by Tarek Ziadé; :issue:`6693`.) @@ -1539,11 +1539,11 @@ * The :mod:`ssl` module's :class:`~ssl.SSLSocket` objects now support the buffer API, which fixed a test suite failure (fix by Antoine Pitrou; :issue:`7133`) and automatically set - OpenSSL's :c:macro:`SSL_MODE_AUTO_RETRY`, which will prevent an error + OpenSSL's :c:macro:`!SSL_MODE_AUTO_RETRY`, which will prevent an error code being returned from :meth:`recv` operations that trigger an SSL renegotiation (fix by Antoine Pitrou; :issue:`8222`). - The :func:`ssl.wrap_socket` constructor function now takes a + The :func:`~ssl.SSLContext.wrap_socket` constructor function now takes a *ciphers* argument that's a string listing the encryption algorithms to be allowed; the format of the string is described `in the OpenSSL documentation @@ -1567,8 +1567,8 @@ code (one of ``bBhHiIlLqQ``); it now always raises a :exc:`struct.error` exception. (Changed by Mark Dickinson; :issue:`1523`.) The :func:`~struct.pack` function will also - attempt to use :meth:`__index__` to convert and pack non-integers - before trying the :meth:`__int__` method or reporting an error. + attempt to use :meth:`~object.__index__` to convert and pack non-integers + before trying the :meth:`~object.__int__` method or reporting an error. (Changed by Mark Dickinson; :issue:`8300`.) * New function: the :mod:`subprocess` module's @@ -1589,7 +1589,7 @@ (Contributed by Gregory P. Smith.) The :mod:`subprocess` module will now retry its internal system calls - on receiving an :const:`EINTR` signal. (Reported by several people; final + on receiving an :const:`~errno.EINTR` signal. (Reported by several people; final patch by Gregory P. Smith in :issue:`1068268`.) * New function: :func:`~symtable.Symbol.is_declared_global` in the :mod:`symtable` module @@ -1601,16 +1601,16 @@ identifier instead of the previous default value of ``'python'``. (Changed by Sean Reifschneider; :issue:`8451`.) -* The ``sys.version_info`` value is now a named tuple, with attributes - named :attr:`major`, :attr:`minor`, :attr:`micro`, - :attr:`releaselevel`, and :attr:`serial`. (Contributed by Ross +* The :attr:`sys.version_info` value is now a named tuple, with attributes + named :attr:`!major`, :attr:`!minor`, :attr:`!micro`, + :attr:`!releaselevel`, and :attr:`!serial`. (Contributed by Ross Light; :issue:`4285`.) 
:func:`sys.getwindowsversion` also returns a named tuple, - with attributes named :attr:`major`, :attr:`minor`, :attr:`build`, - :attr:`platform`, :attr:`service_pack`, :attr:`service_pack_major`, - :attr:`service_pack_minor`, :attr:`suite_mask`, and - :attr:`product_type`. (Contributed by Brian Curtin; :issue:`7766`.) + with attributes named :attr:`!major`, :attr:`!minor`, :attr:`!build`, + :attr:`!platform`, :attr:`!service_pack`, :attr:`!service_pack_major`, + :attr:`!service_pack_minor`, :attr:`!suite_mask`, and + :attr:`!product_type`. (Contributed by Brian Curtin; :issue:`7766`.) * The :mod:`tarfile` module's default error handling has changed, to no longer suppress fatal errors. The default error level was previously 0, @@ -1690,7 +1690,7 @@ (Originally implemented in Python 3.x by Raymond Hettinger, and backported to 2.7 by Michael Foord.) -* The ElementTree library, :mod:`xml.etree`, no longer escapes +* The :mod:`xml.etree.ElementTree` library, no longer escapes ampersands and angle brackets when outputting an XML processing instruction (which looks like ````) or comment (which looks like ````). @@ -1700,8 +1700,8 @@ :mod:`SimpleXMLRPCServer ` modules, have improved performance by supporting HTTP/1.1 keep-alive and by optionally using gzip encoding to compress the XML being exchanged. The gzip compression is - controlled by the :attr:`encode_threshold` attribute of - :class:`SimpleXMLRPCRequestHandler`, which contains a size in bytes; + controlled by the :attr:`!encode_threshold` attribute of + :class:`~xmlrpc.server.SimpleXMLRPCRequestHandler`, which contains a size in bytes; responses larger than this will be compressed. (Contributed by Kristján Valur Jónsson; :issue:`6267`.) @@ -1712,7 +1712,8 @@ :mod:`zipfile` now also supports archiving empty directories and extracts them correctly. (Fixed by Kuba Wieczorek; :issue:`4710`.) Reading files out of an archive is faster, and interleaving - :meth:`~zipfile.ZipFile.read` and :meth:`~zipfile.ZipFile.readline` now works correctly. + :meth:`read() ` and + :meth:`readline() ` now works correctly. (Contributed by Nir Aides; :issue:`7610`.) The :func:`~zipfile.is_zipfile` function now @@ -1806,14 +1807,14 @@ set was originally called Tile, but was renamed to Ttk (for "themed Tk") on being added to Tcl/Tck release 8.5. -To learn more, read the :mod:`ttk` module documentation. You may also +To learn more, read the :mod:`~tkinter.ttk` module documentation. You may also wish to read the Tcl/Tk manual page describing the Ttk theme engine, available at -https://www.tcl.tk/man/tcl8.5/TkCmd/ttk_intro.htm. Some +https://www.tcl.tk/man/tcl8.5/TkCmd/ttk_intro.html. Some screenshots of the Python/Ttk code in use are at https://code.google.com/archive/p/python-ttk/wikis/Screenshots.wiki. -The :mod:`ttk` module was written by Guilherme Polo and added in +The :mod:`tkinter.ttk` module was written by Guilherme Polo and added in :issue:`2983`. An alternate version called ``Tile.py``, written by Martin Franklin and maintained by Kevin Walzer, was proposed for inclusion in :issue:`2618`, but the authors argued that Guilherme @@ -1829,7 +1830,7 @@ new features were added. Most of these features were implemented by Michael Foord, unless otherwise noted. The enhanced version of the module is downloadable separately for use with Python versions 2.4 to 2.6, -packaged as the :mod:`unittest2` package, from +packaged as the :mod:`!unittest2` package, from https://pypi.org/project/unittest2. 
When used from the command line, the module can automatically discover @@ -1937,19 +1938,20 @@ differences in the two strings. This comparison is now used by default when Unicode strings are compared with :meth:`~unittest.TestCase.assertEqual`. -* :meth:`~unittest.TestCase.assertRegexpMatches` and - :meth:`~unittest.TestCase.assertNotRegexpMatches` checks whether the +* :meth:`assertRegexpMatches() ` and + :meth:`assertNotRegexpMatches() ` checks whether the first argument is a string matching or not matching the regular expression provided as the second argument (:issue:`8038`). -* :meth:`~unittest.TestCase.assertRaisesRegexp` checks whether a particular exception +* :meth:`assertRaisesRegexp() ` checks + whether a particular exception is raised, and then also checks that the string representation of the exception matches the provided regular expression. * :meth:`~unittest.TestCase.assertIn` and :meth:`~unittest.TestCase.assertNotIn` tests whether *first* is or is not in *second*. -* :meth:`~unittest.TestCase.assertItemsEqual` tests whether two provided sequences +* :meth:`assertItemsEqual() ` tests whether two provided sequences contain the same elements. * :meth:`~unittest.TestCase.assertSetEqual` compares whether two sets are equal, and @@ -1965,7 +1967,7 @@ * :meth:`~unittest.TestCase.assertDictEqual` compares two dictionaries and reports the differences; it's now used by default when you compare two dictionaries - using :meth:`~unittest.TestCase.assertEqual`. :meth:`~unittest.TestCase.assertDictContainsSubset` checks whether + using :meth:`~unittest.TestCase.assertEqual`. :meth:`!assertDictContainsSubset` checks whether all of the key/value pairs in *first* are found in *second*. * :meth:`~unittest.TestCase.assertAlmostEqual` and :meth:`~unittest.TestCase.assertNotAlmostEqual` test @@ -2022,8 +2024,8 @@ p = ET.XMLParser(encoding='utf-8') t = ET.XML("""""", parser=p) - Errors in parsing XML now raise a :exc:`ParseError` exception, whose - instances have a :attr:`position` attribute + Errors in parsing XML now raise a :exc:`~xml.etree.ElementTree.ParseError` exception, whose + instances have a :attr:`!position` attribute containing a (*line*, *column*) tuple giving the location of the problem. * ElementTree's code for converting trees to a string has been @@ -2033,7 +2035,8 @@ "xml" (the default), "html", or "text". HTML mode will output empty elements as ```` instead of ````, and text mode will skip over elements and only output the text chunks. If - you set the :attr:`tag` attribute of an element to ``None`` but + you set the :attr:`~xml.etree.ElementTree.Element.tag` attribute of an + element to ``None`` but leave its children in place, the element will be omitted when the tree is written out, so you don't need to do more extensive rearrangement to remove a single element. @@ -2063,14 +2066,14 @@ # Outputs 1... print ET.tostring(new) -* New :class:`Element` method: +* New :class:`~xml.etree.ElementTree.Element` method: :meth:`~xml.etree.ElementTree.Element.iter` yields the children of the element as a generator. It's also possible to write ``for child in elem:`` to loop over an element's children. The existing method - :meth:`getiterator` is now deprecated, as is :meth:`getchildren` + :meth:`!getiterator` is now deprecated, as is :meth:`!getchildren` which constructs and returns a list of children. 
-* New :class:`Element` method: +* New :class:`~xml.etree.ElementTree.Element` method: :meth:`~xml.etree.ElementTree.Element.itertext` yields all chunks of text that are descendants of the element. For example:: @@ -2226,7 +2229,7 @@ (Fixed by Thomas Wouters; :issue:`1590864`.) * The :c:func:`Py_Finalize` function now calls the internal - :func:`threading._shutdown` function; this prevents some exceptions from + :func:`!threading._shutdown` function; this prevents some exceptions from being raised when an interpreter shuts down. (Patch by Adam Olsen; :issue:`1722344`.) @@ -2241,7 +2244,7 @@ Heller; :issue:`3102`.) * New configure option: the :option:`!--with-system-expat` switch allows - building the :mod:`pyexpat` module to use the system Expat library. + building the :mod:`pyexpat ` module to use the system Expat library. (Contributed by Arfrever Frehtes Taifersar Arahesis; :issue:`7609`.) * New configure option: the @@ -2328,9 +2331,9 @@ * The :mod:`msvcrt` module now contains some constants from the :file:`crtassem.h` header file: - :data:`CRT_ASSEMBLY_VERSION`, - :data:`VC_ASSEMBLY_PUBLICKEYTOKEN`, - and :data:`LIBRARIES_ASSEMBLY_NAME_PREFIX`. + :data:`~msvcrt.CRT_ASSEMBLY_VERSION`, + :data:`~msvcrt.VC_ASSEMBLY_PUBLICKEYTOKEN`, + and :data:`~msvcrt.LIBRARIES_ASSEMBLY_NAME_PREFIX`. (Contributed by David Cournapeau; :issue:`4365`.) * The :mod:`_winreg ` module for accessing the registry now implements @@ -2341,21 +2344,21 @@ were also tested and documented. (Implemented by Brian Curtin: :issue:`7347`.) -* The new :c:func:`_beginthreadex` API is used to start threads, and +* The new :c:func:`!_beginthreadex` API is used to start threads, and the native thread-local storage functions are now used. (Contributed by Kristján Valur Jónsson; :issue:`3582`.) * The :func:`os.kill` function now works on Windows. The signal value - can be the constants :const:`CTRL_C_EVENT`, - :const:`CTRL_BREAK_EVENT`, or any integer. The first two constants + can be the constants :const:`~signal.CTRL_C_EVENT`, + :const:`~signal.CTRL_BREAK_EVENT`, or any integer. The first two constants will send :kbd:`Control-C` and :kbd:`Control-Break` keystroke events to - subprocesses; any other value will use the :c:func:`TerminateProcess` + subprocesses; any other value will use the :c:func:`!TerminateProcess` API. (Contributed by Miki Tebeka; :issue:`1220212`.) * The :func:`os.listdir` function now correctly fails for an empty path. (Fixed by Hirokazu Yamamoto; :issue:`5913`.) -* The :mod:`mimelib` module will now read the MIME database from +* The :mod:`mimetypes` module will now read the MIME database from the Windows registry when initializing. (Patch by Gabriel Genellina; :issue:`4969`.) @@ -2384,7 +2387,7 @@ Port-Specific Changes: FreeBSD ----------------------------------- -* FreeBSD 7.1's :const:`SO_SETFIB` constant, used with the :func:`~socket.socket` methods +* FreeBSD 7.1's :const:`!SO_SETFIB` constant, used with the :func:`~socket.socket` methods :func:`~socket.socket.getsockopt`/:func:`~socket.socket.setsockopt` to select an alternate routing table, is now available in the :mod:`socket` module. (Added by Kyle VanderBeek; :issue:`8235`.) @@ -2440,7 +2443,7 @@ that may require changes to your code: * The :func:`range` function processes its arguments more - consistently; it will now call :meth:`__int__` on non-float, + consistently; it will now call :meth:`~object.__int__` on non-float, non-integer arguments that are supplied to it. (Fixed by Alexander Belopolsky; :issue:`1533`.) 
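The ``os.kill`` note above can be exercised on Windows roughly as follows; this is a minimal sketch, assuming a Windows build where ``signal.CTRL_BREAK_EVENT`` and ``subprocess.CREATE_NEW_PROCESS_GROUP`` are available, and the child command is only a placeholder::

   import os
   import signal
   import subprocess
   import sys

   # Start a child console process in its own process group so that a
   # Control-Break keystroke event can be delivered to it.
   child = subprocess.Popen(
       [sys.executable, "-c", "import time; time.sleep(60)"],
       creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,
   )

   # CTRL_C_EVENT / CTRL_BREAK_EVENT are delivered as keystroke events;
   # any other signal value falls back to terminating the process.
   os.kill(child.pid, signal.CTRL_BREAK_EVENT)
   child.wait()
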
@@ -2485,13 +2488,13 @@ (or ``NaN``) are now hashable. (Fixed by Mark Dickinson; :issue:`7279`.) -* The ElementTree library, :mod:`xml.etree`, no longer escapes +* The :mod:`xml.etree.ElementTree` library no longer escapes ampersands and angle brackets when outputting an XML processing instruction (which looks like ````) or comment (which looks like ````). (Patch by Neil Muller; :issue:`2746`.) -* The :meth:`~StringIO.StringIO.readline` method of :class:`~StringIO.StringIO` objects now does +* The :meth:`!readline` method of :class:`~io.StringIO` objects now does nothing when a negative length is requested, as other file-like objects do. (:issue:`7348`). @@ -2576,11 +2579,11 @@ -------------------------------------------- In debug mode, the ``[xxx refs]`` statistic is not written by default, the -:envvar:`PYTHONSHOWREFCOUNT` environment variable now must also be set. +:envvar:`!PYTHONSHOWREFCOUNT` environment variable now must also be set. (Contributed by Victor Stinner; :issue:`31733`.) When Python is compiled with ``COUNT_ALLOC`` defined, allocation counts are no -longer dumped by default anymore: the :envvar:`PYTHONSHOWALLOCCOUNT` environment +longer dumped by default anymore: the :envvar:`!PYTHONSHOWALLOCCOUNT` environment variable must now also be set. Moreover, allocation counts are now dumped into stderr, rather than stdout. (Contributed by Victor Stinner; :issue:`31692`.) @@ -2711,7 +2714,8 @@ ----------------------------------------------------------------------------- :pep:`476` updated :mod:`httplib ` and modules which use it, such as -:mod:`urllib2 ` and :mod:`xmlrpclib`, to now verify that the server +:mod:`urllib2 ` and :mod:`xmlrpclib `, to now +verify that the server presents a certificate which is signed by a Certificate Authority in the platform trust store and whose hostname matches the hostname being requested by default, significantly improving security for many applications. This @@ -2752,7 +2756,7 @@ and earlier. For cases where the connection establishment code can't be modified, but the -overall application can be, the new :func:`ssl._https_verify_certificates` +overall application can be, the new :func:`!ssl._https_verify_certificates` function can be used to adjust the default behaviour at runtime. diff -Nru python3.11-3.11.8/Doc/whatsnew/3.1.rst python3.11-3.11.9/Doc/whatsnew/3.1.rst --- python3.11-3.11.8/Doc/whatsnew/3.1.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/3.1.rst 2024-04-02 08:25:04.000000000 +0000 @@ -80,6 +80,28 @@ PEP written by Armin Ronacher and Raymond Hettinger. Implementation written by Raymond Hettinger. +Since an ordered dictionary remembers its insertion order, it can be used +in conjunction with sorting to make a sorted dictionary:: + + >>> # regular unsorted dictionary + >>> d = {'banana': 3, 'apple':4, 'pear': 1, 'orange': 2} + + >>> # dictionary sorted by key + >>> OrderedDict(sorted(d.items(), key=lambda t: t[0])) + OrderedDict([('apple', 4), ('banana', 3), ('orange', 2), ('pear', 1)]) + + >>> # dictionary sorted by value + >>> OrderedDict(sorted(d.items(), key=lambda t: t[1])) + OrderedDict([('pear', 1), ('orange', 2), ('banana', 3), ('apple', 4)]) + + >>> # dictionary sorted by length of the key string + >>> OrderedDict(sorted(d.items(), key=lambda t: len(t[0]))) + OrderedDict([('pear', 1), ('apple', 4), ('orange', 2), ('banana', 3)]) + +The new sorted dictionaries maintain their sort order when entries +are deleted. 
But when new keys are added, the keys are appended +to the end and the sort is not maintained. + PEP 378: Format Specifier for Thousands Separator ================================================= @@ -152,7 +174,7 @@ (Contributed by Eric Smith; :issue:`5237`.) -* The :func:`string.maketrans` function is deprecated and is replaced by new +* The :func:`!string.maketrans` function is deprecated and is replaced by new static methods, :meth:`bytes.maketrans` and :meth:`bytearray.maketrans`. This change solves the confusion around which types were supported by the :mod:`string` module. Now, :class:`str`, :class:`bytes`, and @@ -169,7 +191,7 @@ ... if '' in line: ... outfile.write(line) - With the new syntax, the :func:`contextlib.nested` function is no longer + With the new syntax, the :func:`!contextlib.nested` function is no longer needed and is now deprecated. (Contributed by Georg Brandl and Mattias Brändström; @@ -359,16 +381,20 @@ x / 0 In addition, several new assertion methods were added including - :func:`assertSetEqual`, :func:`assertDictEqual`, - :func:`assertDictContainsSubset`, :func:`assertListEqual`, - :func:`assertTupleEqual`, :func:`assertSequenceEqual`, - :func:`assertRaisesRegexp`, :func:`assertIsNone`, - and :func:`assertIsNotNone`. + :meth:`~unittest.TestCase.assertSetEqual`, + :meth:`~unittest.TestCase.assertDictEqual`, + :meth:`!assertDictContainsSubset`, + :meth:`~unittest.TestCase.assertListEqual`, + :meth:`~unittest.TestCase.assertTupleEqual`, + :meth:`~unittest.TestCase.assertSequenceEqual`, + :meth:`assertRaisesRegexp() `, + :meth:`~unittest.TestCase.assertIsNone`, + and :meth:`~unittest.TestCase.assertIsNotNone`. (Contributed by Benjamin Peterson and Antoine Pitrou.) -* The :mod:`io` module has three new constants for the :meth:`seek` - method :data:`SEEK_SET`, :data:`SEEK_CUR`, and :data:`SEEK_END`. +* The :mod:`io` module has three new constants for the :meth:`~io.IOBase.seek` + method: :data:`~os.SEEK_SET`, :data:`~os.SEEK_CUR`, and :data:`~os.SEEK_END`. * The :data:`sys.version_info` tuple is now a named tuple:: diff -Nru python3.11-3.11.8/Doc/whatsnew/3.10.rst python3.11-3.11.9/Doc/whatsnew/3.10.rst --- python3.11-3.11.8/Doc/whatsnew/3.10.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/3.10.rst 2024-04-02 08:25:04.000000000 +0000 @@ -827,7 +827,7 @@ :meth:`~object.__index__` method). (Contributed by Serhiy Storchaka in :issue:`37999`.) -* If :func:`object.__ipow__` returns :const:`NotImplemented`, the operator will +* If :func:`object.__ipow__` returns :data:`NotImplemented`, the operator will correctly fall back to :func:`object.__pow__` and :func:`object.__rpow__` as expected. (Contributed by Alex Shkop in :issue:`38302`.) @@ -1516,6 +1516,13 @@ documentation. (Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.) +The presence of newline or tab characters in parts of a URL allows for some +forms of attacks. Following the WHATWG specification that updates :rfc:`3986`, +ASCII newline ``\n``, ``\r`` and tab ``\t`` characters are stripped from the +URL by the parser in :mod:`urllib.parse` preventing such attacks. The removal +characters are controlled by a new module level variable +``urllib.parse._UNSAFE_URL_BYTES_TO_REMOVE``. (See :gh:`88048`) + xml --- @@ -2314,3 +2321,43 @@ * The ``PyThreadState.use_tracing`` member has been removed to optimize Python. (Contributed by Mark Shannon in :issue:`43760`.) 
+ + +Notable security feature in 3.10.7 +================================== + +Converting between :class:`int` and :class:`str` in bases other than 2 +(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal) +now raises a :exc:`ValueError` if the number of digits in string form is +above a limit to avoid potential denial of service attacks due to the +algorithmic complexity. This is a mitigation for `CVE-2020-10735 +`_. +This limit can be configured or disabled by environment variable, command +line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion +length limitation ` documentation. The default limit +is 4300 digits in string form. + +Notable security feature in 3.10.8 +================================== + +The deprecated :mod:`!mailcap` module now refuses to inject unsafe text +(filenames, MIME types, parameters) into shell commands. Instead of using such +text, it will warn and act as if a match was not found (or for test commands, +as if the test failed). +(Contributed by Petr Viktorin in :gh:`98966`.) + +Notable changes in 3.10.12 +========================== + +tarfile +------- + +* The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`, + have a new a *filter* argument that allows limiting tar features than may be + surprising or dangerous, such as creating files outside the destination + directory. + See :ref:`tarfile-extraction-filter` for details. + In Python 3.12, use without the *filter* argument will show a + :exc:`DeprecationWarning`. + In Python 3.14, the default will switch to ``'data'``. + (Contributed by Petr Viktorin in :pep:`706`.) diff -Nru python3.11-3.11.8/Doc/whatsnew/3.2.rst python3.11-3.11.9/Doc/whatsnew/3.2.rst --- python3.11-3.11.8/Doc/whatsnew/3.2.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/3.2.rst 2024-04-02 08:25:04.000000000 +0000 @@ -344,8 +344,8 @@ * The :mod:`importlib.abc` module has been updated with new :term:`abstract base classes ` for loading bytecode files. The obsolete - ABCs, :class:`~importlib.abc.PyLoader` and - :class:`~importlib.abc.PyPycLoader`, have been deprecated (instructions on how + ABCs, :class:`!PyLoader` and + :class:`!PyPycLoader`, have been deprecated (instructions on how to stay Python 3.1 compatible are included with the documentation). .. seealso:: @@ -401,7 +401,7 @@ points between *U+0000* through *U+00FF* which are translatable to bytes using *Latin-1* encoding. These strings are used for the keys and values in the environment dictionary and for response headers and statuses in the -:func:`start_response` function. They must follow :rfc:`2616` with respect to +:func:`!start_response` function. They must follow :rfc:`2616` with respect to encoding. That is, they must either be *ISO-8859-1* characters or use :rfc:`2047` MIME encoding. @@ -415,8 +415,8 @@ encoded in utf-8 was using ``h.encode('utf-8')`` now needs to convert from bytes to native strings using ``h.encode('utf-8').decode('latin-1')``. -* Values yielded by an application or sent using the :meth:`write` method - must be byte strings. The :func:`start_response` function and environ +* Values yielded by an application or sent using the :meth:`!write` method + must be byte strings. The :func:`!start_response` function and environ must use native strings. The two cannot be mixed. For server implementers writing CGI-to-WSGI pathways or other CGI-style @@ -497,7 +497,7 @@ * The :func:`hasattr` function works by calling :func:`getattr` and detecting whether an exception is raised. 
This technique allows it to detect methods - created dynamically by :meth:`__getattr__` or :meth:`__getattribute__` which + created dynamically by :meth:`~object.__getattr__` or :meth:`~object.__getattribute__` which would otherwise be absent from the class dictionary. Formerly, *hasattr* would catch any exception, possibly masking genuine errors. Now, *hasattr* has been tightened to only catch :exc:`AttributeError` and let other @@ -618,7 +618,7 @@ * :class:`range` objects now support *index* and *count* methods. This is part of an effort to make more objects fully implement the - :class:`collections.Sequence` :term:`abstract base class`. As a result, the + :class:`collections.Sequence ` :term:`abstract base class`. As a result, the language will have a more uniform API. In addition, :class:`range` objects now support slicing and negative indices, even with values larger than :data:`sys.maxsize`. This makes *range* more interoperable with lists:: @@ -718,7 +718,7 @@ elementtree ----------- -The :mod:`xml.etree.ElementTree` package and its :mod:`xml.etree.cElementTree` +The :mod:`xml.etree.ElementTree` package and its :mod:`!xml.etree.cElementTree` counterpart have been updated to version 1.3. Several new and useful functions and methods have been added: @@ -741,8 +741,8 @@ Two methods have been deprecated: -* :meth:`xml.etree.ElementTree.getchildren` use ``list(elem)`` instead. -* :meth:`xml.etree.ElementTree.getiterator` use ``Element.iter`` instead. +* :meth:`!xml.etree.ElementTree.getchildren` use ``list(elem)`` instead. +* :meth:`!xml.etree.ElementTree.getiterator` use ``Element.iter`` instead. For details of the update, see `Introducing ElementTree `_ @@ -1006,13 +1006,13 @@ after 1900. The new supported year range is from 1000 to 9999 inclusive. * Whenever a two-digit year is used in a time tuple, the interpretation has been - governed by :data:`time.accept2dyear`. The default is ``True`` which means that + governed by :data:`!time.accept2dyear`. The default is ``True`` which means that for a two-digit year, the century is guessed according to the POSIX rules governing the ``%y`` strptime format. Starting with Py3.2, use of the century guessing heuristic will emit a :exc:`DeprecationWarning`. Instead, it is recommended that - :data:`time.accept2dyear` be set to ``False`` so that large date ranges + :data:`!time.accept2dyear` be set to ``False`` so that large date ranges can be used without guesswork:: >>> import time, warnings @@ -1030,7 +1030,7 @@ 'Fri Jan 1 12:34:56 11' Several functions now have significantly expanded date ranges. When - :data:`time.accept2dyear` is false, the :func:`time.asctime` function will + :data:`!time.accept2dyear` is false, the :func:`time.asctime` function will accept any year that fits in a C int, while the :func:`time.mktime` and :func:`time.strftime` functions will accept the full range supported by the corresponding operating system functions. @@ -1146,15 +1146,15 @@ reprlib ------- -When writing a :meth:`__repr__` method for a custom container, it is easy to +When writing a :meth:`~object.__repr__` method for a custom container, it is easy to forget to handle the case where a member refers back to the container itself. Python's builtin objects such as :class:`list` and :class:`set` handle self-reference by displaying "..." in the recursive part of the representation string. 
-To help write such :meth:`__repr__` methods, the :mod:`reprlib` module has a new +To help write such :meth:`~object.__repr__` methods, the :mod:`reprlib` module has a new decorator, :func:`~reprlib.recursive_repr`, for detecting recursive calls to -:meth:`__repr__` and substituting a placeholder string instead:: +:meth:`!__repr__` and substituting a placeholder string instead:: >>> class MyList(list): ... @recursive_repr() @@ -1306,7 +1306,7 @@ >>> sys.hash_info # doctest: +SKIP sys.hash_info(width=64, modulus=2305843009213693951, inf=314159, nan=0, imag=1000003) -An early decision to limit the inter-operability of various numeric types has +An early decision to limit the interoperability of various numeric types has been relaxed. It is still unsupported (and ill-advised) to have implicit mixing in arithmetic expressions such as ``Decimal('1.1') + float('1.1')`` because the latter loses information in the process of constructing the binary @@ -1334,7 +1334,7 @@ Fraction(2476979795053773, 2251799813685248) Another useful change for the :mod:`decimal` module is that the -:attr:`Context.clamp` attribute is now public. This is useful in creating +:attr:`Context.clamp ` attribute is now public. This is useful in creating contexts that correspond to the decimal interchange formats specified in IEEE 754 (see :issue:`8540`). @@ -1426,7 +1426,7 @@ Aides and Brian Curtin in :issue:`9962`, :issue:`1675951`, :issue:`7471` and :issue:`2846`.) -Also, the :class:`zipfile.ZipExtFile` class was reworked internally to represent +Also, the :class:`zipfile.ZipExtFile ` class was reworked internally to represent files stored inside an archive. The new implementation is significantly faster and can be wrapped in an :class:`io.BufferedReader` object for more speedups. It also solves an issue where interleaved calls to *read* and *readline* gave the @@ -1594,7 +1594,7 @@ The :mod:`sqlite3` module was updated to pysqlite version 2.6.0. It has two new capabilities. -* The :attr:`sqlite3.Connection.in_transit` attribute is true if there is an +* The :attr:`!sqlite3.Connection.in_transit` attribute is true if there is an active transaction for uncommitted changes. * The :meth:`sqlite3.Connection.enable_load_extension` and @@ -1641,11 +1641,11 @@ other options. It includes a :meth:`~ssl.SSLContext.wrap_socket` for creating an SSL socket from an SSL context. -* A new function, :func:`ssl.match_hostname`, supports server identity +* A new function, :func:`!ssl.match_hostname`, supports server identity verification for higher-level protocols by implementing the rules of HTTPS (from :rfc:`2818`) which are also suitable for other protocols. -* The :func:`ssl.wrap_socket` constructor function now takes a *ciphers* +* The :func:`ssl.wrap_socket() ` constructor function now takes a *ciphers* argument. The *ciphers* string lists the allowed encryption algorithms using the format described in the `OpenSSL documentation `__. @@ -1757,7 +1757,7 @@ (Contributed by Michael Foord.) * Experimentation at the interactive prompt is now easier because the - :class:`unittest.case.TestCase` class can now be instantiated without + :class:`unittest.TestCase` class can now be instantiated without arguments: >>> from unittest import TestCase @@ -1795,7 +1795,7 @@ * In addition, the method names in the module have undergone a number of clean-ups. 
For example, :meth:`~unittest.TestCase.assertRegex` is the new name for - :meth:`~unittest.TestCase.assertRegexpMatches` which was misnamed because the + :meth:`!assertRegexpMatches` which was misnamed because the test uses :func:`re.search`, not :func:`re.match`. Other methods using regular expressions are now named using short form "Regex" in preference to "Regexp" -- this matches the names used in other unittest implementations, @@ -1810,11 +1810,11 @@ =============================== ============================== Old Name Preferred Name =============================== ============================== - :meth:`assert_` :meth:`.assertTrue` - :meth:`assertEquals` :meth:`.assertEqual` - :meth:`assertNotEquals` :meth:`.assertNotEqual` - :meth:`assertAlmostEquals` :meth:`.assertAlmostEqual` - :meth:`assertNotAlmostEquals` :meth:`.assertNotAlmostEqual` + :meth:`!assert_` :meth:`.assertTrue` + :meth:`!assertEquals` :meth:`.assertEqual` + :meth:`!assertNotEquals` :meth:`.assertNotEqual` + :meth:`!assertAlmostEquals` :meth:`.assertAlmostEqual` + :meth:`!assertNotAlmostEquals` :meth:`.assertNotAlmostEqual` =============================== ============================== Likewise, the ``TestCase.fail*`` methods deprecated in Python 3.1 are expected @@ -1823,7 +1823,7 @@ (Contributed by Ezio Melotti; :issue:`9424`.) -* The :meth:`~unittest.TestCase.assertDictContainsSubset` method was deprecated +* The :meth:`!assertDictContainsSubset` method was deprecated because it was misimplemented with the arguments in the wrong order. This created hard-to-debug optical illusions where tests like ``TestCase().assertDictContainsSubset({'a':1, 'b':2}, {'a':1})`` would fail. @@ -1995,7 +1995,7 @@ dbm --- -All database modules now support the :meth:`get` and :meth:`setdefault` methods. +All database modules now support the :meth:`!get` and :meth:`!setdefault` methods. (Suggested by Ray Allen in :issue:`9523`.) @@ -2116,7 +2116,7 @@ :file:`.pdbrc` script file. * A :file:`.pdbrc` script file can contain ``continue`` and ``next`` commands that continue debugging. -* The :class:`Pdb` class constructor now accepts a *nosigint* argument. +* The :class:`~pdb.Pdb` class constructor now accepts a *nosigint* argument. * New commands: ``l(list)``, ``ll(long list)`` and ``source`` for listing source code. * New commands: ``display`` and ``undisplay`` for showing or hiding @@ -2392,11 +2392,11 @@ (Contributed by Antoine Pitrou; :issue:`3001`.) -* The fast-search algorithm in stringlib is now used by the :meth:`split`, - :meth:`rsplit`, :meth:`splitlines` and :meth:`replace` methods on +* The fast-search algorithm in stringlib is now used by the :meth:`~str.split`, + :meth:`~str.rsplit`, :meth:`~str.splitlines` and :meth:`~str.replace` methods on :class:`bytes`, :class:`bytearray` and :class:`str` objects. Likewise, the - algorithm is also used by :meth:`rfind`, :meth:`rindex`, :meth:`rsplit` and - :meth:`rpartition`. + algorithm is also used by :meth:`~str.rfind`, :meth:`~str.rindex`, :meth:`~str.rsplit` and + :meth:`~str.rpartition`. (Patch by Florent Xicluna in :issue:`7622` and :issue:`7462`.) @@ -2408,8 +2408,8 @@ There were several other minor optimizations. Set differencing now runs faster when one operand is much larger than the other (patch by Andress Bennetts in -:issue:`8685`). The :meth:`array.repeat` method has a faster implementation -(:issue:`1569291` by Alexander Belopolsky). The :class:`BaseHTTPRequestHandler` +:issue:`8685`). 
The :meth:`!array.repeat` method has a faster implementation +(:issue:`1569291` by Alexander Belopolsky). The :class:`~http.server.BaseHTTPRequestHandler` has more efficient buffering (:issue:`3709` by Andrew Schaaf). The :func:`operator.attrgetter` function has been sped-up (:issue:`10160` by Christos Georgiou). And :class:`~configparser.ConfigParser` loads multi-line arguments a bit @@ -2560,11 +2560,11 @@ (Suggested by Raymond Hettinger and implemented by Benjamin Peterson; :issue:`9778`.) -* A new macro :c:macro:`Py_VA_COPY` copies the state of the variable argument +* A new macro :c:macro:`!Py_VA_COPY` copies the state of the variable argument list. It is equivalent to C99 *va_copy* but available on all Python platforms (:issue:`2443`). -* A new C API function :c:func:`PySys_SetArgvEx` allows an embedded interpreter +* A new C API function :c:func:`!PySys_SetArgvEx` allows an embedded interpreter to set :data:`sys.argv` without also modifying :data:`sys.path` (:issue:`5753`). @@ -2648,8 +2648,9 @@ * :class:`bytearray` objects can no longer be used as filenames; instead, they should be converted to :class:`bytes`. -* The :meth:`array.tostring` and :meth:`array.fromstring` have been renamed to - :meth:`array.tobytes` and :meth:`array.frombytes` for clarity. The old names +* The :meth:`!array.tostring` and :meth:`!array.fromstring` have been renamed to + :meth:`array.tobytes() ` and + :meth:`array.frombytes() ` for clarity. The old names have been deprecated. (See :issue:`8990`.) * ``PyArg_Parse*()`` functions: @@ -2662,7 +2663,7 @@ instead; the new type has a well-defined interface for passing typing safety information and a less complicated signature for calling a destructor. -* The :func:`sys.setfilesystemencoding` function was removed because +* The :func:`!sys.setfilesystemencoding` function was removed because it had a flawed design. * The :func:`random.seed` function and method now salt string seeds with an @@ -2670,7 +2671,7 @@ reproduce Python 3.1 sequences, set the *version* argument to *1*, ``random.seed(s, version=1)``. -* The previously deprecated :func:`string.maketrans` function has been removed +* The previously deprecated :func:`!string.maketrans` function has been removed in favor of the static methods :meth:`bytes.maketrans` and :meth:`bytearray.maketrans`. This change solves the confusion around which types were supported by the :mod:`string` module. Now, :class:`str`, @@ -2680,7 +2681,7 @@ (Contributed by Georg Brandl; :issue:`5675`.) -* The previously deprecated :func:`contextlib.nested` function has been removed +* The previously deprecated :func:`!contextlib.nested` function has been removed in favor of a plain :keyword:`with` statement which can accept multiple context managers. The latter technique is faster (because it is built-in), and it does a better job finalizing multiple context managers when one of them diff -Nru python3.11-3.11.8/Doc/whatsnew/3.4.rst python3.11-3.11.9/Doc/whatsnew/3.4.rst --- python3.11-3.11.8/Doc/whatsnew/3.4.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/3.4.rst 2024-04-02 08:25:04.000000000 +0000 @@ -872,7 +872,7 @@ PEP written and implemented by Łukasz Langa. :func:`~functools.total_ordering` now supports a return value of -:const:`NotImplemented` from the underlying comparison function. (Contributed +:data:`NotImplemented` from the underlying comparison function. (Contributed by Katie Miller in :issue:`10042`.) 
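A minimal sketch of the ``total_ordering`` behaviour described above; the ``Version`` class is only an illustration::

   from functools import total_ordering

   @total_ordering
   class Version:
       def __init__(self, number):
           self.number = number

       def __eq__(self, other):
           if not isinstance(other, Version):
               return NotImplemented   # propagated by the generated methods
           return self.number == other.number

       def __lt__(self, other):
           if not isinstance(other, Version):
               return NotImplemented
           return self.number < other.number

   print(Version(2) >= Version(1))   # True
   print(Version(2).__le__("2"))     # NotImplemented rather than an error
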
A pure-python version of the :func:`~functools.partial` function is now in the diff -Nru python3.11-3.11.8/Doc/whatsnew/3.6.rst python3.11-3.11.9/Doc/whatsnew/3.6.rst --- python3.11-3.11.8/Doc/whatsnew/3.6.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/3.6.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1472,6 +1472,10 @@ were added. (Contributed by Christian Heimes in :issue:`28085`.) +Added :attr:`ssl.SSLContext.post_handshake_auth` to enable and +:meth:`ssl.SSLSocket.verify_client_post_handshake` to initiate TLS 1.3 +post-handshake authentication. +(Contributed by Christian Heimes in :gh:`78851`.) statistics ---------- @@ -2063,6 +2067,15 @@ environment. (Contributed by Brett Cannon in :issue:`25154`.) +xml +--- + +* As mitigation against DTD and external entity retrieval, the + :mod:`xml.dom.minidom` and :mod:`xml.sax` modules no longer process + external entities by default. + (Contributed by Christian Heimes in :gh:`61441`.) + + Deprecated functions and types of the C API ------------------------------------------- @@ -2430,9 +2443,13 @@ locale to the ``LC_NUMERIC`` locale in some cases. (Contributed by Victor Stinner in :issue:`31900`.) + Notable changes in Python 3.6.7 =============================== +:mod:`xml.dom.minidom` and :mod:`xml.sax` modules no longer process +external entities by default. See also :gh:`61441`. + In 3.6.7 the :mod:`tokenize` module now implicitly emits a ``NEWLINE`` token when provided with input that does not have a trailing new line. This behavior now matches what the C tokenizer does internally. @@ -2460,3 +2477,19 @@ functions internally. For more details, please see their respective documentation. (Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.) + +Notable changes in Python 3.6.14 +================================ + +A security fix alters the :class:`ftplib.FTP` behavior to not trust the +IPv4 address sent from the remote server when setting up a passive data +channel. We reuse the ftp server IP address instead. For unusual code +requiring the old behavior, set a ``trust_server_pasv_ipv4_address`` +attribute on your FTP instance to ``True``. (See :gh:`87451`) + +The presence of newline or tab characters in parts of a URL allows for some +forms of attacks. Following the WHATWG specification that updates RFC 3986, +ASCII newline ``\n``, ``\r`` and tab ``\t`` characters are stripped from the +URL by the parser :func:`urllib.parse` preventing such attacks. The removal +characters are controlled by a new module level variable +``urllib.parse._UNSAFE_URL_BYTES_TO_REMOVE``. (See :gh:`88048`) diff -Nru python3.11-3.11.8/Doc/whatsnew/3.7.rst python3.11-3.11.9/Doc/whatsnew/3.7.rst --- python3.11-3.11.8/Doc/whatsnew/3.7.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/3.7.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1380,6 +1380,10 @@ :data:`~ssl.HAS_TLSv1_1`. (Contributed by Christian Heimes in :issue:`32609`.) +Added :attr:`ssl.SSLContext.post_handshake_auth` to enable and +:meth:`ssl.SSLSocket.verify_client_post_handshake` to initiate TLS 1.3 +post-handshake authentication. +(Contributed by Christian Heimes in :gh:`78851`.) string ------ @@ -1599,6 +1603,15 @@ (Contributed by Nick Coghlan in :issue:`31975`.) +xml +--- + +As mitigation against DTD and external entity retrieval, the +:mod:`xml.dom.minidom` and :mod:`xml.sax` modules no longer process +external entities by default. +(Contributed by Christian Heimes in :gh:`61441`.) 
+ + xml.etree --------- @@ -2571,3 +2584,34 @@ functions internally. For more details, please see their respective documentation. (Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.) + +Notable changes in Python 3.7.11 +================================ + +A security fix alters the :class:`ftplib.FTP` behavior to not trust the +IPv4 address sent from the remote server when setting up a passive data +channel. We reuse the ftp server IP address instead. For unusual code +requiring the old behavior, set a ``trust_server_pasv_ipv4_address`` +attribute on your FTP instance to ``True``. (See :gh:`87451`) + + +The presence of newline or tab characters in parts of a URL allows for some +forms of attacks. Following the WHATWG specification that updates RFC 3986, +ASCII newline ``\n``, ``\r`` and tab ``\t`` characters are stripped from the +URL by the parser :func:`urllib.parse` preventing such attacks. The removal +characters are controlled by a new module level variable +``urllib.parse._UNSAFE_URL_BYTES_TO_REMOVE``. (See :gh:`88048`) + +Notable security feature in 3.7.14 +================================== + +Converting between :class:`int` and :class:`str` in bases other than 2 +(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal) +now raises a :exc:`ValueError` if the number of digits in string form is +above a limit to avoid potential denial of service attacks due to the +algorithmic complexity. This is a mitigation for `CVE-2020-10735 +`_. +This limit can be configured or disabled by environment variable, command +line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion +length limitation ` documentation. The default limit +is 4300 digits in string form. diff -Nru python3.11-3.11.8/Doc/whatsnew/3.8.rst python3.11-3.11.9/Doc/whatsnew/3.8.rst --- python3.11-3.11.8/Doc/whatsnew/3.8.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/3.8.rst 2024-04-02 08:25:04.000000000 +0000 @@ -2240,6 +2240,21 @@ (Contributed by Kyle Stanley, Antoine Pitrou, and Yury Selivanov in :issue:`37228`.) +Notable changes in Python 3.8.2 +=============================== + +Fixed a regression with the ``ignore`` callback of :func:`shutil.copytree`. +The argument types are now str and List[str] again. +(Contributed by Manuel Barkhau and Giampaolo Rodola in :gh:`83571`.) + +Notable changes in Python 3.8.3 +=============================== + +The constant values of future flags in the :mod:`__future__` module +are updated in order to prevent collision with compiler flags. Previously +``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. +(Contributed by Batuhan Taskaya in :gh:`83743`) + Notable changes in Python 3.8.8 =============================== @@ -2253,9 +2268,55 @@ documentation. (Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.) +Notable changes in Python 3.8.9 +=============================== + +A security fix alters the :class:`ftplib.FTP` behavior to not trust the +IPv4 address sent from the remote server when setting up a passive data +channel. We reuse the ftp server IP address instead. For unusual code +requiring the old behavior, set a ``trust_server_pasv_ipv4_address`` +attribute on your FTP instance to ``True``. 
(See :gh:`87451`) + +Notable changes in Python 3.8.10 +================================ + +macOS 11.0 (Big Sur) and Apple Silicon Mac support +-------------------------------------------------- + +As of 3.8.10, Python now supports building and running on macOS 11 +(Big Sur) and on Apple Silicon Macs (based on the ``ARM64`` architecture). +A new universal build variant, ``universal2``, is now available to natively +support both ``ARM64`` and ``Intel 64`` in one set of executables. +Note that support for "weaklinking", building binaries targeted for newer +versions of macOS that will also run correctly on older versions by +testing at runtime for missing features, is not included in this backport +from Python 3.9; to support a range of macOS versions, continue to target +for and build on the oldest version in the range. + +(Originally contributed by Ronald Oussoren and Lawrence D'Anna in :gh:`85272`, +with fixes by FX Coudert and Eli Rykoff, and backported to 3.8 by Maxime Bélanger +and Ned Deily) + +Notable changes in Python 3.8.10 +================================ + +urllib.parse +------------ + +The presence of newline or tab characters in parts of a URL allows for some +forms of attacks. Following the WHATWG specification that updates :rfc:`3986`, +ASCII newline ``\n``, ``\r`` and tab ``\t`` characters are stripped from the +URL by the parser in :mod:`urllib.parse` preventing such attacks. The removal +characters are controlled by a new module level variable +``urllib.parse._UNSAFE_URL_BYTES_TO_REMOVE``. (See :issue:`43882`) + + Notable changes in Python 3.8.12 ================================ +Changes in the Python API +------------------------- + Starting with Python 3.8.12 the :mod:`ipaddress` module no longer accepts any leading zeros in IPv4 address strings. Leading zeros are ambiguous and interpreted as octal notation by some libraries. For example the legacy @@ -2265,3 +2326,33 @@ (Originally contributed by Christian Heimes in :issue:`36384`, and backported to 3.8 by Achraf Merzouki.) + +Notable security feature in 3.8.14 +================================== + +Converting between :class:`int` and :class:`str` in bases other than 2 +(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal) +now raises a :exc:`ValueError` if the number of digits in string form is +above a limit to avoid potential denial of service attacks due to the +algorithmic complexity. This is a mitigation for `CVE-2020-10735 +`_. +This limit can be configured or disabled by environment variable, command +line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion +length limitation ` documentation. The default limit +is 4300 digits in string form. + +Notable changes in 3.8.17 +========================= + +tarfile +------- + +* The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`, + have a new a *filter* argument that allows limiting tar features than may be + surprising or dangerous, such as creating files outside the destination + directory. + See :ref:`tarfile-extraction-filter` for details. + In Python 3.12, use without the *filter* argument will show a + :exc:`DeprecationWarning`. + In Python 3.14, the default will switch to ``'data'``. + (Contributed by Petr Viktorin in :pep:`706`.) 
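The two backported behaviours described above can be exercised roughly as follows; this is a minimal sketch, and the archive name and destination directory are placeholders::

   import sys
   import tarfile

   # Integer/string conversion limit (CVE-2020-10735 mitigation): the
   # default cap is 4300 digits; longer decimal strings raise ValueError.
   print(sys.get_int_max_str_digits())    # 4300 by default
   sys.set_int_max_str_digits(10_000)     # raise the limit for this process
   big = int("9" * 5000)                  # allowed after raising the limit

   # tarfile extraction filters (PEP 706): 'data' rejects absolute paths,
   # links escaping the destination, special files, and similar surprises.
   with tarfile.open("example.tar.gz") as tf:
       tf.extractall(path="dest", filter="data")
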
diff -Nru python3.11-3.11.8/Doc/whatsnew/3.9.rst python3.11-3.11.9/Doc/whatsnew/3.9.rst --- python3.11-3.11.8/Doc/whatsnew/3.9.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Doc/whatsnew/3.9.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1126,7 +1126,7 @@ ``logging.getLogger(__name__)`` in some top-level module called ``'root.py'``. (Contributed by Vinay Sajip in :issue:`37742`.) -* Division handling of :class:`~pathlib.PurePath` now returns ``NotImplemented`` +* Division handling of :class:`~pathlib.PurePath` now returns :data:`NotImplemented` instead of raising a :exc:`TypeError` when passed something other than an instance of ``str`` or :class:`~pathlib.PurePath`. This allows creating compatible classes that don't inherit from those mentioned types. @@ -1562,3 +1562,55 @@ functions internally. For more details, please see their respective documentation. (Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.) + +Notable changes in Python 3.9.3 +=============================== + +A security fix alters the :class:`ftplib.FTP` behavior to not trust the +IPv4 address sent from the remote server when setting up a passive data +channel. We reuse the ftp server IP address instead. For unusual code +requiring the old behavior, set a ``trust_server_pasv_ipv4_address`` +attribute on your FTP instance to ``True``. (See :gh:`87451`) + +Notable changes in Python 3.9.5 +=============================== + +urllib.parse +------------ + +The presence of newline or tab characters in parts of a URL allows for some +forms of attacks. Following the WHATWG specification that updates :rfc:`3986`, +ASCII newline ``\n``, ``\r`` and tab ``\t`` characters are stripped from the +URL by the parser in :mod:`urllib.parse` preventing such attacks. The removal +characters are controlled by a new module level variable +``urllib.parse._UNSAFE_URL_BYTES_TO_REMOVE``. (See :gh:`88048`) + +Notable security feature in 3.9.14 +================================== + +Converting between :class:`int` and :class:`str` in bases other than 2 +(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal) +now raises a :exc:`ValueError` if the number of digits in string form is +above a limit to avoid potential denial of service attacks due to the +algorithmic complexity. This is a mitigation for `CVE-2020-10735 +`_. +This limit can be configured or disabled by environment variable, command +line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion +length limitation ` documentation. The default limit +is 4300 digits in string form. + +Notable changes in 3.9.17 +========================= + +tarfile +------- + +* The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`, + have a new a *filter* argument that allows limiting tar features than may be + surprising or dangerous, such as creating files outside the destination + directory. + See :ref:`tarfile-extraction-filter` for details. + In Python 3.12, use without the *filter* argument will show a + :exc:`DeprecationWarning`. + In Python 3.14, the default will switch to ``'data'``. + (Contributed by Petr Viktorin in :pep:`706`.) diff -Nru python3.11-3.11.8/Grammar/python.gram python3.11-3.11.9/Grammar/python.gram --- python3.11-3.11.8/Grammar/python.gram 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Grammar/python.gram 2024-04-02 08:25:04.000000000 +0000 @@ -391,7 +391,7 @@ with_stmt[stmt_ty]: | invalid_with_stmt_indent | 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? 
')' ':' b=block { - CHECK_VERSION(stmt_ty, 9, "Parenthesized context managers are", _PyAST_With(a, b, NULL, EXTRA)) } + _PyAST_With(a, b, NULL, EXTRA) } | 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { _PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } | ASYNC 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block { diff -Nru python3.11-3.11.8/Include/Python.h python3.11-3.11.9/Include/Python.h --- python3.11-3.11.8/Include/Python.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Include/Python.h 2024-04-02 08:25:04.000000000 +0000 @@ -49,6 +49,10 @@ #include "bytearrayobject.h" #include "bytesobject.h" #include "unicodeobject.h" +#include "cpython/code.h" +#include "cpython/initconfig.h" +#include "pystate.h" +#include "pyerrors.h" #include "longobject.h" #include "cpython/longintrepr.h" #include "boolobject.h" @@ -68,14 +72,11 @@ #include "cpython/classobject.h" #include "fileobject.h" #include "pycapsule.h" -#include "cpython/code.h" #include "pyframe.h" #include "traceback.h" #include "sliceobject.h" #include "cpython/cellobject.h" #include "iterobject.h" -#include "cpython/initconfig.h" -#include "pystate.h" #include "cpython/genobject.h" #include "descrobject.h" #include "genericaliasobject.h" @@ -85,7 +86,6 @@ #include "cpython/picklebufobject.h" #include "cpython/pytime.h" #include "codecs.h" -#include "pyerrors.h" #include "pythread.h" #include "cpython/context.h" #include "modsupport.h" diff -Nru python3.11-3.11.8/Include/longobject.h python3.11-3.11.9/Include/longobject.h --- python3.11-3.11.8/Include/longobject.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Include/longobject.h 2024-04-02 08:25:04.000000000 +0000 @@ -34,7 +34,24 @@ #if !defined(SIZEOF_PID_T) || SIZEOF_PID_T == SIZEOF_INT #define _Py_PARSE_PID "i" #define PyLong_FromPid PyLong_FromLong -#define PyLong_AsPid PyLong_AsLong +# ifndef Py_LIMITED_API +# define PyLong_AsPid _PyLong_AsInt +# elif SIZEOF_INT == SIZEOF_LONG +# define PyLong_AsPid PyLong_AsLong +# else +static inline int +PyLong_AsPid(PyObject *obj) +{ + int overflow; + long result = PyLong_AsLongAndOverflow(obj, &overflow); + if (overflow || result > INT_MAX || result < INT_MIN) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C int"); + return -1; + } + return (int)result; +} +# endif #elif SIZEOF_PID_T == SIZEOF_LONG #define _Py_PARSE_PID "l" #define PyLong_FromPid PyLong_FromLong diff -Nru python3.11-3.11.8/Include/patchlevel.h python3.11-3.11.9/Include/patchlevel.h --- python3.11-3.11.8/Include/patchlevel.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Include/patchlevel.h 2024-04-02 08:25:04.000000000 +0000 @@ -18,12 +18,12 @@ /*--start constants--*/ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 11 -#define PY_MICRO_VERSION 8 +#define PY_MICRO_VERSION 9 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL #define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.11.8" +#define PY_VERSION "3.11.9" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. 
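Judging from the ``Grammar/python.gram`` hunk above (the ``CHECK_VERSION`` guard is dropped), parsing parenthesized context managers with an older ``feature_version`` no longer trips the version-specific error; a minimal sketch under that assumption::

   import ast

   SRC = "with (open('a') as f, open('b') as g):\n    pass\n"

   # Nothing is executed here; the source is only parsed.  Before this
   # change the version guard rejected the parenthesized form when an
   # older feature_version was requested.
   tree = ast.parse(SRC, feature_version=(3, 8))
   print(type(tree.body[0]).__name__)   # With
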
diff -Nru python3.11-3.11.8/Include/pyexpat.h python3.11-3.11.9/Include/pyexpat.h --- python3.11-3.11.8/Include/pyexpat.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Include/pyexpat.h 2024-04-02 08:25:04.000000000 +0000 @@ -48,8 +48,10 @@ enum XML_Status (*SetEncoding)(XML_Parser parser, const XML_Char *encoding); int (*DefaultUnknownEncodingHandler)( void *encodingHandlerData, const XML_Char *name, XML_Encoding *info); - /* might be none for expat < 2.1.0 */ + /* might be NULL for expat < 2.1.0 */ int (*SetHashSalt)(XML_Parser parser, unsigned long hash_salt); + /* might be NULL for expat < 2.6.0 */ + XML_Bool (*SetReparseDeferralEnabled)(XML_Parser parser, XML_Bool enabled); /* always add new stuff to the end! */ }; diff -Nru python3.11-3.11.8/Lib/_pyio.py python3.11-3.11.9/Lib/_pyio.py --- python3.11-3.11.8/Lib/_pyio.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/_pyio.py 2024-04-02 08:25:04.000000000 +0000 @@ -1224,7 +1224,8 @@ return written def tell(self): - return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos + # GH-95782: Keep return value non-negative + return max(_BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos, 0) def seek(self, pos, whence=0): if whence not in valid_seek_flags: diff -Nru python3.11-3.11.8/Lib/argparse.py python3.11-3.11.9/Lib/argparse.py --- python3.11-3.11.8/Lib/argparse.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/argparse.py 2024-04-02 08:25:04.000000000 +0000 @@ -225,7 +225,8 @@ # add the heading if the section was non-empty if self.heading is not SUPPRESS and self.heading is not None: current_indent = self.formatter._current_indent - heading = '%*s%s:\n' % (current_indent, '', self.heading) + heading_text = _('%(heading)s:') % dict(heading=self.heading) + heading = '%*s%s\n' % (current_indent, '', heading_text) else: heading = '' @@ -414,6 +415,8 @@ suppressed_actions_count += 1 exposed_actions_count = group_action_count - suppressed_actions_count + if not exposed_actions_count: + continue if not group.required: if start in inserts: @@ -719,7 +722,7 @@ if action.default is not SUPPRESS: defaulting_nargs = [OPTIONAL, ZERO_OR_MORE] if action.option_strings or action.nargs in defaulting_nargs: - help += ' (default: %(default)s)' + help += _(' (default: %(default)s)') return help @@ -1127,7 +1130,9 @@ version=None, dest=SUPPRESS, default=SUPPRESS, - help="show program's version number and exit"): + help=None): + if help is None: + help = _("show program's version number and exit") super(_VersionAction, self).__init__( option_strings=option_strings, dest=dest, @@ -1982,7 +1987,7 @@ # get the optional identified at this index option_tuple = option_string_indices[start_index] - action, option_string, explicit_arg = option_tuple + action, option_string, sep, explicit_arg = option_tuple # identify additional optionals in the same arg string # (e.g. 
-xyz is the same as -x -y -z if no args are required) @@ -2009,18 +2014,27 @@ and option_string[1] not in chars and explicit_arg != '' ): + if sep or explicit_arg[0] in chars: + msg = _('ignored explicit argument %r') + raise ArgumentError(action, msg % explicit_arg) action_tuples.append((action, [], option_string)) char = option_string[0] option_string = char + explicit_arg[0] - new_explicit_arg = explicit_arg[1:] or None optionals_map = self._option_string_actions if option_string in optionals_map: action = optionals_map[option_string] - explicit_arg = new_explicit_arg + explicit_arg = explicit_arg[1:] + if not explicit_arg: + sep = explicit_arg = None + elif explicit_arg[0] == '=': + sep = '=' + explicit_arg = explicit_arg[1:] + else: + sep = '' else: - msg = _('ignored explicit argument %r') - raise ArgumentError(action, msg % explicit_arg) - + extras.append(char + explicit_arg) + stop = start_index + 1 + break # if the action expect exactly one argument, we've # successfully matched the option; exit the loop elif arg_count == 1: @@ -2238,18 +2252,17 @@ # if the option string is present in the parser, return the action if arg_string in self._option_string_actions: action = self._option_string_actions[arg_string] - return action, arg_string, None + return action, arg_string, None, None # if it's just a single character, it was meant to be positional if len(arg_string) == 1: return None # if the option string before the "=" is present, return the action - if '=' in arg_string: - option_string, explicit_arg = arg_string.split('=', 1) - if option_string in self._option_string_actions: - action = self._option_string_actions[option_string] - return action, option_string, explicit_arg + option_string, sep, explicit_arg = arg_string.partition('=') + if sep and option_string in self._option_string_actions: + action = self._option_string_actions[option_string] + return action, option_string, sep, explicit_arg # search through all possible prefixes of the option string # and all actions in the parser for possible interpretations @@ -2258,7 +2271,7 @@ # if multiple actions match, the option string was ambiguous if len(option_tuples) > 1: options = ', '.join([option_string - for action, option_string, explicit_arg in option_tuples]) + for action, option_string, sep, explicit_arg in option_tuples]) args = {'option': arg_string, 'matches': options} msg = _('ambiguous option: %(option)s could match %(matches)s') self.error(msg % args) @@ -2282,7 +2295,7 @@ # it was meant to be an optional but there is no such option # in this parser (though it might be a valid option in a subparser) - return None, arg_string, None + return None, arg_string, None, None def _get_option_tuples(self, option_string): result = [] @@ -2292,15 +2305,13 @@ chars = self.prefix_chars if option_string[0] in chars and option_string[1] in chars: if self.allow_abbrev: - if '=' in option_string: - option_prefix, explicit_arg = option_string.split('=', 1) - else: - option_prefix = option_string - explicit_arg = None + option_prefix, sep, explicit_arg = option_string.partition('=') + if not sep: + sep = explicit_arg = None for option_string in self._option_string_actions: if option_string.startswith(option_prefix): action = self._option_string_actions[option_string] - tup = action, option_string, explicit_arg + tup = action, option_string, sep, explicit_arg result.append(tup) # single character options can be concatenated with their arguments @@ -2308,18 +2319,17 @@ # separate elif option_string[0] in chars and option_string[1] not in 
chars: option_prefix = option_string - explicit_arg = None short_option_prefix = option_string[:2] short_explicit_arg = option_string[2:] for option_string in self._option_string_actions: if option_string == short_option_prefix: action = self._option_string_actions[option_string] - tup = action, option_string, short_explicit_arg + tup = action, option_string, '', short_explicit_arg result.append(tup) elif option_string.startswith(option_prefix): action = self._option_string_actions[option_string] - tup = action, option_string, explicit_arg + tup = action, option_string, None, None result.append(tup) # shouldn't ever get here diff -Nru python3.11-3.11.8/Lib/asyncio/windows_events.py python3.11-3.11.9/Lib/asyncio/windows_events.py --- python3.11-3.11.8/Lib/asyncio/windows_events.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/asyncio/windows_events.py 2024-04-02 08:25:04.000000000 +0000 @@ -323,13 +323,13 @@ if self._self_reading_future is not None: ov = self._self_reading_future._ov self._self_reading_future.cancel() - # self_reading_future was just cancelled so if it hasn't been - # finished yet, it never will be (it's possible that it has - # already finished and its callback is waiting in the queue, - # where it could still happen if the event loop is restarted). - # Unregister it otherwise IocpProactor.close will wait for it - # forever - if ov is not None: + # self_reading_future always uses IOCP, so even though it's + # been cancelled, we need to make sure that the IOCP message + # is received so that the kernel is not holding on to the + # memory, possibly causing memory corruption later. Only + # unregister it if IO is complete in all respects. Otherwise + # we need another _poll() later to complete the IO. + if ov is not None and not ov.pending: self._proactor._unregister(ov) self._self_reading_future = None @@ -513,6 +513,10 @@ try: return ov.getresult() except OSError as exc: + # WSARecvFrom will report ERROR_PORT_UNREACHABLE when the same + # socket is used to send to an address that is not listening. + if exc.winerror == _overlapped.ERROR_PORT_UNREACHABLE: + return b'', None if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, _overlapped.ERROR_OPERATION_ABORTED): raise ConnectionResetError(*exc.args) @@ -533,6 +537,10 @@ try: return ov.getresult() except OSError as exc: + # WSARecvFrom will report ERROR_PORT_UNREACHABLE when the same + # socket is used to send to an address that is not listening. + if exc.winerror == _overlapped.ERROR_PORT_UNREACHABLE: + return 0, None if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, _overlapped.ERROR_OPERATION_ABORTED): raise ConnectionResetError(*exc.args) diff -Nru python3.11-3.11.8/Lib/collections/__init__.py python3.11-3.11.9/Lib/collections/__init__.py --- python3.11-3.11.8/Lib/collections/__init__.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/collections/__init__.py 2024-04-02 08:25:04.000000000 +0000 @@ -671,7 +671,7 @@ ''' # The regular dict.update() operation makes no sense here because the - # replace behavior results in the some of original untouched counts + # replace behavior results in some of the original untouched counts # being mixed-in with all of the other counts for a mismash that # doesn't have a straight-forward interpretation in most counting # contexts. Instead, we implement straight-addition. 
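The argparse hunks earlier in this diff replace split('=', 1) with str.partition('=') and carry the separator through the option tuples as a fourth element. A minimal sketch (made-up option strings) of the distinction that extra sep value preserves:

# Sketch of the str.partition('=') behaviour the argparse hunks above build on:
# the middle element records whether an '=' was present, so an explicit empty
# value ('--foo=') is no longer conflated with a missing one ('--foo').
for arg in ('--foo=bar', '--foo=', '--foo'):
    option, sep, explicit_arg = arg.partition('=')
    if not sep:
        sep = explicit_arg = None
    print(arg, option, repr(sep), repr(explicit_arg))
# --foo=bar --foo '=' 'bar'
# --foo= --foo '=' ''
# --foo --foo None None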
Both the inputs diff -Nru python3.11-3.11.8/Lib/configparser.py python3.11-3.11.9/Lib/configparser.py --- python3.11-3.11.8/Lib/configparser.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/configparser.py 2024-04-02 08:25:04.000000000 +0000 @@ -1033,100 +1033,102 @@ lineno = 0 indent_level = 0 e = None # None, or an exception - for lineno, line in enumerate(fp, start=1): - comment_start = sys.maxsize - # strip inline comments - inline_prefixes = {p: -1 for p in self._inline_comment_prefixes} - while comment_start == sys.maxsize and inline_prefixes: - next_prefixes = {} - for prefix, index in inline_prefixes.items(): - index = line.find(prefix, index+1) - if index == -1: - continue - next_prefixes[prefix] = index - if index == 0 or (index > 0 and line[index-1].isspace()): - comment_start = min(comment_start, index) - inline_prefixes = next_prefixes - # strip full line comments - for prefix in self._comment_prefixes: - if line.strip().startswith(prefix): - comment_start = 0 - break - if comment_start == sys.maxsize: - comment_start = None - value = line[:comment_start].strip() - if not value: - if self._empty_lines_in_values: - # add empty line to the value, but only if there was no - # comment on the line - if (comment_start is None and - cursect is not None and - optname and - cursect[optname] is not None): - cursect[optname].append('') # newlines added at join - else: - # empty line marks end of value - indent_level = sys.maxsize - continue - # continuation line? - first_nonspace = self.NONSPACECRE.search(line) - cur_indent_level = first_nonspace.start() if first_nonspace else 0 - if (cursect is not None and optname and - cur_indent_level > indent_level): - cursect[optname].append(value) - # a section header or option header? - else: - indent_level = cur_indent_level - # is it a section header? - mo = self.SECTCRE.match(value) - if mo: - sectname = mo.group('header') - if sectname in self._sections: - if self._strict and sectname in elements_added: - raise DuplicateSectionError(sectname, fpname, - lineno) - cursect = self._sections[sectname] - elements_added.add(sectname) - elif sectname == self.default_section: - cursect = self._defaults + try: + for lineno, line in enumerate(fp, start=1): + comment_start = sys.maxsize + # strip inline comments + inline_prefixes = {p: -1 for p in self._inline_comment_prefixes} + while comment_start == sys.maxsize and inline_prefixes: + next_prefixes = {} + for prefix, index in inline_prefixes.items(): + index = line.find(prefix, index+1) + if index == -1: + continue + next_prefixes[prefix] = index + if index == 0 or (index > 0 and line[index-1].isspace()): + comment_start = min(comment_start, index) + inline_prefixes = next_prefixes + # strip full line comments + for prefix in self._comment_prefixes: + if line.strip().startswith(prefix): + comment_start = 0 + break + if comment_start == sys.maxsize: + comment_start = None + value = line[:comment_start].strip() + if not value: + if self._empty_lines_in_values: + # add empty line to the value, but only if there was no + # comment on the line + if (comment_start is None and + cursect is not None and + optname and + cursect[optname] is not None): + cursect[optname].append('') # newlines added at join else: - cursect = self._dict() - self._sections[sectname] = cursect - self._proxies[sectname] = SectionProxy(self, sectname) - elements_added.add(sectname) - # So sections can't start with a continuation line - optname = None - # no section header in the file? 
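The configparser change above wraps the whole read loop in try/finally so that _join_multiline_values() still runs when a parsing error escapes partway through a file. A toy reader showing the same shape of fix; the helper below is illustrative, not configparser's real parser:

# Toy reader following the same pattern as the configparser hunk above: the
# per-line loop can raise partway through, but the finally block still joins
# the multi-line values collected so far.
def read(lines):
    values = {}                       # option -> list of raw value lines
    current = None
    try:
        for lineno, line in enumerate(lines, start=1):
            if line.startswith(' ') and current:
                values[current].append(line.strip())      # continuation line
            elif '=' in line:
                key, _, rest = line.partition('=')
                current = key.strip()
                values[current] = [rest.strip()]
            else:
                raise ValueError(f'bad line {lineno}: {line!r}')
    finally:
        for key, parts in values.items():                 # runs even on error
            values[key] = '\n'.join(parts)
    return values

print(read(['a = 1', '  2']))         # {'a': '1\n2'}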
- elif cursect is None: - raise MissingSectionHeaderError(fpname, lineno, line) - # an option line? + # empty line marks end of value + indent_level = sys.maxsize + continue + # continuation line? + first_nonspace = self.NONSPACECRE.search(line) + cur_indent_level = first_nonspace.start() if first_nonspace else 0 + if (cursect is not None and optname and + cur_indent_level > indent_level): + cursect[optname].append(value) + # a section header or option header? else: - mo = self._optcre.match(value) + indent_level = cur_indent_level + # is it a section header? + mo = self.SECTCRE.match(value) if mo: - optname, vi, optval = mo.group('option', 'vi', 'value') - if not optname: - e = self._handle_error(e, fpname, lineno, line) - optname = self.optionxform(optname.rstrip()) - if (self._strict and - (sectname, optname) in elements_added): - raise DuplicateOptionError(sectname, optname, - fpname, lineno) - elements_added.add((sectname, optname)) - # This check is fine because the OPTCRE cannot - # match if it would set optval to None - if optval is not None: - optval = optval.strip() - cursect[optname] = [optval] + sectname = mo.group('header') + if sectname in self._sections: + if self._strict and sectname in elements_added: + raise DuplicateSectionError(sectname, fpname, + lineno) + cursect = self._sections[sectname] + elements_added.add(sectname) + elif sectname == self.default_section: + cursect = self._defaults else: - # valueless option handling - cursect[optname] = None + cursect = self._dict() + self._sections[sectname] = cursect + self._proxies[sectname] = SectionProxy(self, sectname) + elements_added.add(sectname) + # So sections can't start with a continuation line + optname = None + # no section header in the file? + elif cursect is None: + raise MissingSectionHeaderError(fpname, lineno, line) + # an option line? else: - # a non-fatal parsing error occurred. set up the - # exception but keep going. the exception will be - # raised at the end of the file and will contain a - # list of all bogus lines - e = self._handle_error(e, fpname, lineno, line) - self._join_multiline_values() + mo = self._optcre.match(value) + if mo: + optname, vi, optval = mo.group('option', 'vi', 'value') + if not optname: + e = self._handle_error(e, fpname, lineno, line) + optname = self.optionxform(optname.rstrip()) + if (self._strict and + (sectname, optname) in elements_added): + raise DuplicateOptionError(sectname, optname, + fpname, lineno) + elements_added.add((sectname, optname)) + # This check is fine because the OPTCRE cannot + # match if it would set optval to None + if optval is not None: + optval = optval.strip() + cursect[optname] = [optval] + else: + # valueless option handling + cursect[optname] = None + else: + # a non-fatal parsing error occurred. set up the + # exception but keep going. 
the exception will be + # raised at the end of the file and will contain a + # list of all bogus lines + e = self._handle_error(e, fpname, lineno, line) + finally: + self._join_multiline_values() # if any parsing errors occurred, raise an exception if e: raise e diff -Nru python3.11-3.11.8/Lib/ctypes/test/test_loading.py python3.11-3.11.9/Lib/ctypes/test/test_loading.py --- python3.11-3.11.8/Lib/ctypes/test/test_loading.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/ctypes/test/test_loading.py 2024-04-02 08:25:04.000000000 +0000 @@ -45,11 +45,15 @@ self.assertRaises(OSError, cdll.LoadLibrary, self.unknowndll) def test_find(self): + found = False for name in ("c", "m"): lib = find_library(name) if lib: + found = True cdll.LoadLibrary(lib) CDLL(lib) + if not found: + self.skipTest("Could not find c and m libraries") @unittest.skipUnless(os.name == "nt", 'test specific to Windows') diff -Nru python3.11-3.11.8/Lib/dataclasses.py python3.11-3.11.9/Lib/dataclasses.py --- python3.11-3.11.8/Lib/dataclasses.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/dataclasses.py 2024-04-02 08:25:04.000000000 +0000 @@ -1132,8 +1132,10 @@ def _get_slots(cls): match cls.__dict__.get('__slots__'): + # A class which does not define __slots__ at all is equivalent + # to a class defining __slots__ = ('__dict__', '__weakref__') case None: - return + yield from ('__dict__', '__weakref__') case str(slot): yield slot # Slots may be any iterable, but we cannot handle an iterator diff -Nru python3.11-3.11.8/Lib/distutils/command/check.py python3.11-3.11.9/Lib/distutils/command/check.py --- python3.11-3.11.8/Lib/distutils/command/check.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/distutils/command/check.py 2024-04-02 08:25:04.000000000 +0000 @@ -125,7 +125,13 @@ # the include and csv_table directives need this to be a path source_path = self.distribution.script_name or 'setup.py' parser = Parser() - settings = frontend.OptionParser(components=(Parser,)).get_default_values() + try: + get_default_settings = frontend.get_default_settings + except AttributeError: + # Deprecated in Docutils 0.19, may be broken in Docutils 0.21. + settings = frontend.OptionParser(components=(Parser,)).get_default_values() + else: + settings = get_default_settings(Parser) settings.tab_width = 4 settings.pep_references = None settings.rfc_references = None diff -Nru python3.11-3.11.8/Lib/distutils/tests/test_register.py python3.11-3.11.9/Lib/distutils/tests/test_register.py --- python3.11-3.11.8/Lib/distutils/tests/test_register.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/distutils/tests/test_register.py 2024-04-02 08:25:04.000000000 +0000 @@ -291,6 +291,8 @@ cmd = self._get_cmd() with check_warnings() as w: warnings.simplefilter("always") + warnings.filterwarnings("ignore", ".*OptionParser class will be replaced.*") + warnings.filterwarnings("ignore", ".*Option class will be removed.*") cmd.check_metadata() self.assertEqual(len(w.warnings), 1) diff -Nru python3.11-3.11.8/Lib/doctest.py python3.11-3.11.9/Lib/doctest.py --- python3.11-3.11.8/Lib/doctest.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/doctest.py 2024-04-02 08:25:04.000000000 +0000 @@ -1118,7 +1118,7 @@ obj = obj.fget if inspect.isfunction(obj) and getattr(obj, '__doc__', None): # We don't use `docstring` var here, because `obj` can be changed. 
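The doctest change continuing just below swaps obj.__code__ for inspect.unwrap(obj).__code__, so the source file and line are taken from the decorated function rather than from a functools.wraps wrapper. A minimal, made-up illustration of the difference:

import functools
import inspect

def deco(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

@deco
def target():
    """Docstring (and any doctests) live on target, not on wrapper."""

# functools.wraps copies the docstring but not the code object, so the wrapper
# points doctest at the wrong source location unless it is unwrapped first.
print(target.__code__.co_name)                  # wrapper
print(inspect.unwrap(target).__code__.co_name)  # target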
- obj = obj.__code__ + obj = inspect.unwrap(obj).__code__ if inspect.istraceback(obj): obj = obj.tb_frame if inspect.isframe(obj): obj = obj.f_code if inspect.iscode(obj): @@ -2197,13 +2197,13 @@ unittest.TestCase.__init__(self) self._dt_optionflags = optionflags self._dt_checker = checker - self._dt_globs = test.globs.copy() self._dt_test = test self._dt_setUp = setUp self._dt_tearDown = tearDown def setUp(self): test = self._dt_test + self._dt_globs = test.globs.copy() if self._dt_setUp is not None: self._dt_setUp(test) diff -Nru python3.11-3.11.8/Lib/email/_header_value_parser.py python3.11-3.11.9/Lib/email/_header_value_parser.py --- python3.11-3.11.8/Lib/email/_header_value_parser.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/email/_header_value_parser.py 2024-04-02 08:25:04.000000000 +0000 @@ -949,6 +949,7 @@ # up other parse trees. Maybe should have tests for that, too. DOT = ValueTerminal('.', 'dot') ListSeparator = ValueTerminal(',', 'list-separator') +ListSeparator.as_ew_allowed = False RouteComponentMarker = ValueTerminal('@', 'route-component-marker') # @@ -2022,7 +2023,7 @@ address_list.defects.append(errors.InvalidHeaderDefect( "invalid address in address-list")) if value: # Must be a , at this point. - address_list.append(ValueTerminal(',', 'list-separator')) + address_list.append(ListSeparator) value = value[1:] return address_list, value diff -Nru python3.11-3.11.8/Lib/email/generator.py python3.11-3.11.9/Lib/email/generator.py --- python3.11-3.11.8/Lib/email/generator.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/email/generator.py 2024-04-02 08:25:04.000000000 +0000 @@ -243,7 +243,7 @@ # existing message. msg = deepcopy(msg) del msg['content-transfer-encoding'] - msg.set_payload(payload, charset) + msg.set_payload(msg._payload, charset) payload = msg.get_payload() self._munge_cte = (msg['content-transfer-encoding'], msg['content-type']) diff -Nru python3.11-3.11.8/Lib/email/message.py python3.11-3.11.9/Lib/email/message.py --- python3.11-3.11.8/Lib/email/message.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/email/message.py 2024-04-02 08:25:04.000000000 +0000 @@ -340,7 +340,7 @@ return if not isinstance(charset, Charset): charset = Charset(charset) - payload = payload.encode(charset.output_charset) + payload = payload.encode(charset.output_charset, 'surrogateescape') if hasattr(payload, 'decode'): self._payload = payload.decode('ascii', 'surrogateescape') else: diff -Nru python3.11-3.11.8/Lib/enum.py python3.11-3.11.9/Lib/enum.py --- python3.11-3.11.8/Lib/enum.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/enum.py 2024-04-02 08:25:04.000000000 +0000 @@ -276,9 +276,10 @@ enum_member._sort_order_ = len(enum_class._member_names_) if Flag is not None and issubclass(enum_class, Flag): - enum_class._flag_mask_ |= value - if _is_single_bit(value): - enum_class._singles_mask_ |= value + if isinstance(value, int): + enum_class._flag_mask_ |= value + if _is_single_bit(value): + enum_class._singles_mask_ |= value enum_class._all_bits_ = 2 ** ((enum_class._flag_mask_).bit_length()) - 1 # If another member with the same value was already defined, the @@ -306,6 +307,7 @@ elif ( Flag is not None and issubclass(enum_class, Flag) + and isinstance(value, int) and _is_single_bit(value) ): # no other instances found, record this member in _member_names_ @@ -1502,37 +1504,50 @@ def __bool__(self): return bool(self._value_) + def _get_value(self, flag): + if isinstance(flag, self.__class__): + return flag._value_ + 
elif self._member_type_ is not object and isinstance(flag, self._member_type_): + return flag + return NotImplemented + def __or__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with |") value = self._value_ - return self.__class__(value | other) + return self.__class__(value | other_value) def __and__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with &") value = self._value_ - return self.__class__(value & other) + return self.__class__(value & other_value) def __xor__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with ^") value = self._value_ - return self.__class__(value ^ other) + return self.__class__(value ^ other_value) def __invert__(self): + if self._get_value(self) is None: + raise TypeError(f"'{self}' cannot be inverted") + if self._inverted_ is None: if self._boundary_ in (EJECT, KEEP): self._inverted_ = self.__class__(~self._value_) @@ -1596,7 +1611,7 @@ cls_name = self.__class__.__name__ if self._name_ is None: return "%s.%s(%r)" % (module, cls_name, self._value_) - if _is_single_bit(self): + if _is_single_bit(self._value_): return '%s.%s' % (module, self._name_) if self._boundary_ is not FlagBoundary.KEEP: return '|'.join(['%s.%s' % (module, name) for name in self.name.split('|')]) diff -Nru python3.11-3.11.8/Lib/glob.py python3.11-3.11.9/Lib/glob.py --- python3.11-3.11.8/Lib/glob.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/glob.py 2024-04-02 08:25:04.000000000 +0000 @@ -132,7 +132,8 @@ def _glob2(dirname, pattern, dir_fd, dironly, include_hidden=False): assert _isrecursive(pattern) - yield pattern[:0] + if not dirname or _isdir(dirname, dir_fd): + yield pattern[:0] yield from _rlistdir(dirname, dir_fd, dironly, include_hidden=include_hidden) diff -Nru python3.11-3.11.8/Lib/http/client.py python3.11-3.11.9/Lib/http/client.py --- python3.11-3.11.8/Lib/http/client.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/http/client.py 2024-04-02 08:25:04.000000000 +0000 @@ -907,17 +907,23 @@ host = host[:i] else: port = self.default_port - if host and host[0] == '[' and host[-1] == ']': - host = host[1:-1] + if host and host[0] == '[' and host[-1] == ']': + host = host[1:-1] return (host, port) def set_debuglevel(self, level): self.debuglevel = level + def _wrap_ipv6(self, ip): + if b':' in ip and ip[0] != b'['[0]: + return b"[" + ip + b"]" + return ip + def _tunnel(self): connect = b"CONNECT %s:%d HTTP/1.0\r\n" % ( - self._tunnel_host.encode("ascii"), self._tunnel_port) 
+ self._wrap_ipv6(self._tunnel_host.encode("ascii")), + self._tunnel_port) headers = [connect] for header, value in self._tunnel_headers.items(): headers.append(f"{header}: {value}\r\n".encode("latin-1")) @@ -1188,9 +1194,8 @@ # As per RFC 273, IPv6 address should be wrapped with [] # when used as Host header - + host_enc = self._wrap_ipv6(host_enc) if ":" in host: - host_enc = b'[' + host_enc + b']' host_enc = _strip_ipv6_iface(host_enc) if port == self.default_port: diff -Nru python3.11-3.11.8/Lib/idlelib/editor.py python3.11-3.11.9/Lib/idlelib/editor.py --- python3.11-3.11.8/Lib/idlelib/editor.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/idlelib/editor.py 2024-04-02 08:25:04.000000000 +0000 @@ -1044,7 +1044,9 @@ def saved_change_hook(self): short = self.short_title() long = self.long_title() - if short and long: + if short and long and not macosx.isCocoaTk(): + # Don't use both values on macOS because + # that doesn't match platform conventions. title = short + " - " + long + _py_version elif short: title = short @@ -1059,6 +1061,13 @@ self.top.wm_title(title) self.top.wm_iconname(icon) + if macosx.isCocoaTk(): + # Add a proxy icon to the window title + self.top.wm_attributes("-titlepath", long) + + # Maintain the modification status for the window + self.top.wm_attributes("-modified", not self.get_saved()) + def get_saved(self): return self.undo.get_saved() diff -Nru python3.11-3.11.8/Lib/importlib/_bootstrap_external.py python3.11-3.11.9/Lib/importlib/_bootstrap_external.py --- python3.11-3.11.8/Lib/importlib/_bootstrap_external.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/importlib/_bootstrap_external.py 2024-04-02 08:25:04.000000000 +0000 @@ -1405,6 +1405,9 @@ # https://bugs.python.org/issue45703 _NamespacePath._epoch += 1 + from importlib.metadata import MetadataPathFinder + MetadataPathFinder.invalidate_caches() + @staticmethod def _path_hooks(path): """Search sys.path_hooks for a finder for 'path'.""" diff -Nru python3.11-3.11.8/Lib/importlib/metadata/__init__.py python3.11-3.11.9/Lib/importlib/metadata/__init__.py --- python3.11-3.11.8/Lib/importlib/metadata/__init__.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/importlib/metadata/__init__.py 2024-04-02 08:25:04.000000000 +0000 @@ -915,6 +915,7 @@ path.search(prepared) for path in map(FastPath, paths) ) + @classmethod def invalidate_caches(cls): FastPath.__new__.cache_clear() diff -Nru python3.11-3.11.8/Lib/importlib/resources/simple.py python3.11-3.11.9/Lib/importlib/resources/simple.py --- python3.11-3.11.8/Lib/importlib/resources/simple.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/importlib/resources/simple.py 2024-04-02 08:25:04.000000000 +0000 @@ -69,7 +69,7 @@ def open(self, mode='r', *args, **kwargs): stream = self.parent.reader.open_binary(self.name) if 'b' not in mode: - stream = io.TextIOWrapper(*args, **kwargs) + stream = io.TextIOWrapper(stream, *args, **kwargs) return stream def joinpath(self, name): diff -Nru python3.11-3.11.8/Lib/importlib/util.py python3.11-3.11.9/Lib/importlib/util.py --- python3.11-3.11.8/Lib/importlib/util.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/importlib/util.py 2024-04-02 08:25:04.000000000 +0000 @@ -15,6 +15,7 @@ import _imp import functools import sys +import threading import types import warnings @@ -225,36 +226,53 @@ def __getattribute__(self, attr): """Trigger the load of the module and return the attribute.""" - # All module metadata must be garnered from __spec__ in order to avoid - 
# using mutated values. - # Stop triggering this method. - self.__class__ = types.ModuleType - # Get the original name to make sure no object substitution occurred - # in sys.modules. - original_name = self.__spec__.name - # Figure out exactly what attributes were mutated between the creation - # of the module and now. - attrs_then = self.__spec__.loader_state['__dict__'] - attrs_now = self.__dict__ - attrs_updated = {} - for key, value in attrs_now.items(): - # Code that set the attribute may have kept a reference to the - # assigned object, making identity more important than equality. - if key not in attrs_then: - attrs_updated[key] = value - elif id(attrs_now[key]) != id(attrs_then[key]): - attrs_updated[key] = value - self.__spec__.loader.exec_module(self) - # If exec_module() was used directly there is no guarantee the module - # object was put into sys.modules. - if original_name in sys.modules: - if id(self) != id(sys.modules[original_name]): - raise ValueError(f"module object for {original_name!r} " - "substituted in sys.modules during a lazy " - "load") - # Update after loading since that's what would happen in an eager - # loading situation. - self.__dict__.update(attrs_updated) + __spec__ = object.__getattribute__(self, '__spec__') + loader_state = __spec__.loader_state + with loader_state['lock']: + # Only the first thread to get the lock should trigger the load + # and reset the module's class. The rest can now getattr(). + if object.__getattribute__(self, '__class__') is _LazyModule: + # Reentrant calls from the same thread must be allowed to proceed without + # triggering the load again. + # exec_module() and self-referential imports are the primary ways this can + # happen, but in any case we must return something to avoid deadlock. + if loader_state['is_loading']: + return object.__getattribute__(self, attr) + loader_state['is_loading'] = True + + __dict__ = object.__getattribute__(self, '__dict__') + + # All module metadata must be gathered from __spec__ in order to avoid + # using mutated values. + # Get the original name to make sure no object substitution occurred + # in sys.modules. + original_name = __spec__.name + # Figure out exactly what attributes were mutated between the creation + # of the module and now. + attrs_then = loader_state['__dict__'] + attrs_now = __dict__ + attrs_updated = {} + for key, value in attrs_now.items(): + # Code that set an attribute may have kept a reference to the + # assigned object, making identity more important than equality. + if key not in attrs_then: + attrs_updated[key] = value + elif id(attrs_now[key]) != id(attrs_then[key]): + attrs_updated[key] = value + __spec__.loader.exec_module(self) + # If exec_module() was used directly there is no guarantee the module + # object was put into sys.modules. + if original_name in sys.modules: + if id(self) != id(sys.modules[original_name]): + raise ValueError(f"module object for {original_name!r} " + "substituted in sys.modules during a lazy " + "load") + # Update after loading since that's what would happen in an eager + # loading situation. + __dict__.update(attrs_updated) + # Finally, stop triggering this method. 
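For context on the importlib.util hunk above, which puts the first attribute access on a lazily loaded module behind an RLock: the standard lazy-import recipe from the importlib documentation, shown below, is the code path that locking protects (the helper name is ours).

import importlib.util
import sys

def lazy_import(name):
    # Standard LazyLoader recipe: the module object exists immediately, but its
    # code only runs on the first attribute access -- the step the new lock and
    # is_loading flag make safe under threads and re-entrant imports.
    spec = importlib.util.find_spec(name)
    loader = importlib.util.LazyLoader(spec.loader)
    spec.loader = loader
    module = importlib.util.module_from_spec(spec)
    sys.modules[name] = module
    loader.exec_module(module)
    return module

json = lazy_import("json")       # body of json not executed yet
print(json.dumps({"a": 1}))      # first attribute access triggers the real load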
+ self.__class__ = types.ModuleType + return getattr(self, attr) def __delattr__(self, attr): @@ -298,5 +316,7 @@ loader_state = {} loader_state['__dict__'] = module.__dict__.copy() loader_state['__class__'] = module.__class__ + loader_state['lock'] = threading.RLock() + loader_state['is_loading'] = False module.__spec__.loader_state = loader_state module.__class__ = _LazyModule diff -Nru python3.11-3.11.8/Lib/inspect.py python3.11-3.11.9/Lib/inspect.py --- python3.11-3.11.8/Lib/inspect.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/inspect.py 2024-04-02 08:25:04.000000000 +0000 @@ -748,18 +748,14 @@ :exc:`ValueError` is raised if a cycle is encountered. """ - if stop is None: - def _is_wrapper(f): - return hasattr(f, '__wrapped__') - else: - def _is_wrapper(f): - return hasattr(f, '__wrapped__') and not stop(f) f = func # remember the original func for error reporting # Memoise by id to tolerate non-hashable objects, but store objects to # ensure they aren't destroyed, which would allow their IDs to be reused. memo = {id(f): f} recursion_limit = sys.getrecursionlimit() - while _is_wrapper(func): + while not isinstance(func, type) and hasattr(func, '__wrapped__'): + if stop is not None and stop(func): + break func = func.__wrapped__ id_func = id(func) if (id_func in memo) or (len(memo) >= recursion_limit): @@ -1949,15 +1945,17 @@ named ``method_name`` and returns it only if it is a pure python function. """ - try: - meth = getattr(cls, method_name) - except AttributeError: - return + if method_name == '__new__': + meth = getattr(cls, method_name, None) else: - if not isinstance(meth, _NonUserDefinedCallables): - # Once '__signature__' will be added to 'C'-level - # callables, this check won't be necessary - return meth + meth = getattr_static(cls, method_name, None) + if meth is None or isinstance(meth, _NonUserDefinedCallables): + # Once '__signature__' will be added to 'C'-level + # callables, this check won't be necessary + return None + if method_name != '__new__': + meth = _descriptor_get(meth, cls) + return meth def _signature_get_partial(wrapped_sig, partial, extra_args=()): @@ -2425,6 +2423,15 @@ __validate_parameters__=is_duck_function) +def _descriptor_get(descriptor, obj): + if isclass(descriptor): + return descriptor + get = getattr(type(descriptor), '__get__', _sentinel) + if get is _sentinel: + return descriptor + return get(descriptor, obj, type(obj)) + + def _signature_from_callable(obj, *, follow_wrapper_chains=True, skip_bound_arg=True, @@ -2525,7 +2532,6 @@ wrapped_sig = _get_signature_of(obj.func) return _signature_get_partial(wrapped_sig, obj) - sig = None if isinstance(obj, type): # obj is a class or a metaclass @@ -2533,88 +2539,65 @@ # in its metaclass call = _signature_get_user_defined_method(type(obj), '__call__') if call is not None: - sig = _get_signature_of(call) - else: - factory_method = None - new = _signature_get_user_defined_method(obj, '__new__') - init = _signature_get_user_defined_method(obj, '__init__') - - # Go through the MRO and see if any class has user-defined - # pure Python __new__ or __init__ method - for base in obj.__mro__: - # Now we check if the 'obj' class has an own '__new__' method - if new is not None and '__new__' in base.__dict__: - factory_method = new - break - # or an own '__init__' method - elif init is not None and '__init__' in base.__dict__: - factory_method = init - break - - if factory_method is not None: - sig = _get_signature_of(factory_method) - - if sig is None: - # At this point we know, that `obj` is a 
class, with no user- - # defined '__init__', '__new__', or class-level '__call__' - - for base in obj.__mro__[:-1]: - # Since '__text_signature__' is implemented as a - # descriptor that extracts text signature from the - # class docstring, if 'obj' is derived from a builtin - # class, its own '__text_signature__' may be 'None'. - # Therefore, we go through the MRO (except the last - # class in there, which is 'object') to find the first - # class with non-empty text signature. - try: - text_sig = base.__text_signature__ - except AttributeError: - pass - else: - if text_sig: - # If 'base' class has a __text_signature__ attribute: - # return a signature based on it - return _signature_fromstr(sigcls, base, text_sig) - - # No '__text_signature__' was found for the 'obj' class. - # Last option is to check if its '__init__' is - # object.__init__ or type.__init__. - if type not in obj.__mro__: - # We have a class (not metaclass), but no user-defined - # __init__ or __new__ for it - if (obj.__init__ is object.__init__ and - obj.__new__ is object.__new__): - # Return a signature of 'object' builtin. - return sigcls.from_callable(object) - else: - raise ValueError( - 'no signature found for builtin type {!r}'.format(obj)) + return _get_signature_of(call) - elif not isinstance(obj, _NonUserDefinedCallables): - # An object with __call__ - # We also check that the 'obj' is not an instance of - # types.WrapperDescriptorType or types.MethodWrapperType to avoid - # infinite recursion (and even potential segfault) - call = _signature_get_user_defined_method(type(obj), '__call__') - if call is not None: + new = _signature_get_user_defined_method(obj, '__new__') + init = _signature_get_user_defined_method(obj, '__init__') + + # Go through the MRO and see if any class has user-defined + # pure Python __new__ or __init__ method + for base in obj.__mro__: + # Now we check if the 'obj' class has an own '__new__' method + if new is not None and '__new__' in base.__dict__: + sig = _get_signature_of(new) + if skip_bound_arg: + sig = _signature_bound_method(sig) + return sig + # or an own '__init__' method + elif init is not None and '__init__' in base.__dict__: + return _get_signature_of(init) + + # At this point we know, that `obj` is a class, with no user- + # defined '__init__', '__new__', or class-level '__call__' + + for base in obj.__mro__[:-1]: + # Since '__text_signature__' is implemented as a + # descriptor that extracts text signature from the + # class docstring, if 'obj' is derived from a builtin + # class, its own '__text_signature__' may be 'None'. + # Therefore, we go through the MRO (except the last + # class in there, which is 'object') to find the first + # class with non-empty text signature. try: - sig = _get_signature_of(call) - except ValueError as ex: - msg = 'no signature found for {!r}'.format(obj) - raise ValueError(msg) from ex - - if sig is not None: - # For classes and objects we skip the first parameter of their - # __call__, __new__, or __init__ methods - if skip_bound_arg: - return _signature_bound_method(sig) - else: - return sig + text_sig = base.__text_signature__ + except AttributeError: + pass + else: + if text_sig: + # If 'base' class has a __text_signature__ attribute: + # return a signature based on it + return _signature_fromstr(sigcls, base, text_sig) + + # No '__text_signature__' was found for the 'obj' class. + # Last option is to check if its '__init__' is + # object.__init__ or type.__init__. 
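The inspect hunk above reorders how a class's call signature is resolved: a user-defined metaclass __call__ first, then __new__ or __init__ located through the MRO with getattr_static, then a base class __text_signature__, and finally object's own signature. Two everyday cases of that resolution, with invented class names:

import inspect

class Point:
    def __init__(self, x, y=0):
        self.x, self.y = x, y

class Empty:
    pass

# A plain class takes its signature from __init__ (minus self)...
print(inspect.signature(Point))   # (x, y=0)
# ...and one defining neither __init__ nor __new__ falls back to object's.
print(inspect.signature(Empty))   # ()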
+ if type not in obj.__mro__: + # We have a class (not metaclass), but no user-defined + # __init__ or __new__ for it + if (obj.__init__ is object.__init__ and + obj.__new__ is object.__new__): + # Return a signature of 'object' builtin. + return sigcls.from_callable(object) + else: + raise ValueError( + 'no signature found for builtin type {!r}'.format(obj)) - if isinstance(obj, types.BuiltinFunctionType): - # Raise a nicer error message for builtins - msg = 'no signature found for builtin function {!r}'.format(obj) - raise ValueError(msg) + else: + # An object with __call__ + call = getattr_static(type(obj), '__call__', None) + if call is not None: + call = _descriptor_get(call, obj) + return _get_signature_of(call) raise ValueError('callable {!r} is not supported by signature'.format(obj)) diff -Nru python3.11-3.11.8/Lib/json/encoder.py python3.11-3.11.9/Lib/json/encoder.py --- python3.11-3.11.8/Lib/json/encoder.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/json/encoder.py 2024-04-02 08:25:04.000000000 +0000 @@ -174,7 +174,7 @@ else: return list(iterable) # Let the base class default method raise the TypeError - return JSONEncoder.default(self, o) + return super().default(o) """ raise TypeError(f'Object of type {o.__class__.__name__} ' diff -Nru python3.11-3.11.8/Lib/linecache.py python3.11-3.11.9/Lib/linecache.py --- python3.11-3.11.8/Lib/linecache.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/linecache.py 2024-04-02 08:25:04.000000000 +0000 @@ -166,13 +166,11 @@ return False # Try for a __loader__, if available if module_globals and '__name__' in module_globals: - name = module_globals['__name__'] - if (loader := module_globals.get('__loader__')) is None: - if spec := module_globals.get('__spec__'): - try: - loader = spec.loader - except AttributeError: - pass + spec = module_globals.get('__spec__') + name = getattr(spec, 'name', None) or module_globals['__name__'] + loader = getattr(spec, 'loader', None) + if loader is None: + loader = module_globals.get('__loader__') get_source = getattr(loader, 'get_source', None) if name and get_source: diff -Nru python3.11-3.11.8/Lib/logging/__init__.py python3.11-3.11.9/Lib/logging/__init__.py --- python3.11-3.11.8/Lib/logging/__init__.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/logging/__init__.py 2024-04-02 08:25:04.000000000 +0000 @@ -1471,7 +1471,7 @@ To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.debug("Houston, we have a %s", "thorny problem", exc_info=1) + logger.debug("Houston, we have a %s", "thorny problem", exc_info=True) """ if self.isEnabledFor(DEBUG): self._log(DEBUG, msg, args, **kwargs) @@ -1483,7 +1483,7 @@ To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.info("Houston, we have a %s", "interesting problem", exc_info=1) + logger.info("Houston, we have a %s", "interesting problem", exc_info=True) """ if self.isEnabledFor(INFO): self._log(INFO, msg, args, **kwargs) @@ -1495,7 +1495,7 @@ To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1) + logger.warning("Houston, we have a %s", "bit of a problem", exc_info=True) """ if self.isEnabledFor(WARNING): self._log(WARNING, msg, args, **kwargs) @@ -1512,7 +1512,7 @@ To pass exception information, use the keyword argument exc_info with a true value, e.g. 
- logger.error("Houston, we have a %s", "major problem", exc_info=1) + logger.error("Houston, we have a %s", "major problem", exc_info=True) """ if self.isEnabledFor(ERROR): self._log(ERROR, msg, args, **kwargs) @@ -1530,7 +1530,7 @@ To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.critical("Houston, we have a %s", "major disaster", exc_info=1) + logger.critical("Houston, we have a %s", "major disaster", exc_info=True) """ if self.isEnabledFor(CRITICAL): self._log(CRITICAL, msg, args, **kwargs) @@ -1548,7 +1548,7 @@ To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.log(level, "We have a %s", "mysterious problem", exc_info=1) + logger.log(level, "We have a %s", "mysterious problem", exc_info=True) """ if not isinstance(level, int): if raiseExceptions: @@ -1910,18 +1910,11 @@ """ return self.logger.hasHandlers() - def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False): + def _log(self, level, msg, args, **kwargs): """ Low-level log implementation, proxied to allow nested logger adapters. """ - return self.logger._log( - level, - msg, - args, - exc_info=exc_info, - extra=extra, - stack_info=stack_info, - ) + return self.logger._log(level, msg, args, **kwargs) @property def manager(self): @@ -1981,7 +1974,7 @@ that this argument is incompatible with 'filename' - if both are present, 'stream' is ignored. handlers If specified, this should be an iterable of already created - handlers, which will be added to the root handler. Any handler + handlers, which will be added to the root logger. Any handler in the list which does not have a formatter assigned will be assigned the formatter created in this function. force If this keyword is specified as true, any existing handlers diff -Nru python3.11-3.11.8/Lib/logging/handlers.py python3.11-3.11.9/Lib/logging/handlers.py --- python3.11-3.11.8/Lib/logging/handlers.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/logging/handlers.py 2024-04-02 08:25:04.000000000 +0000 @@ -232,19 +232,19 @@ if self.when == 'S': self.interval = 1 # one second self.suffix = "%Y-%m-%d_%H-%M-%S" - self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}(\.\w+)?$" + extMatch = r"(? (plen + 1) and not fileName[plen+1].isdigit()): - continue - - if fileName[:plen] == prefix: - suffix = fileName[plen:] - # See bpo-45628: The date/time suffix could be anywhere in the - # filename - parts = suffix.split('.') - for part in parts: - if self.extMatch.match(part): + if self.namer is None: + prefix = baseName + '.' + plen = len(prefix) + for fileName in fileNames: + if fileName[:plen] == prefix: + suffix = fileName[plen:] + if self.extMatch.fullmatch(suffix): + result.append(os.path.join(dirName, fileName)) + else: + for fileName in fileNames: + # Our files could be just about anything after custom naming, + # but they should contain the datetime suffix. + # Try to find the datetime suffix in the file name and verify + # that the file name can be generated by this handler. + m = self.extMatch.search(fileName) + while m: + dfn = self.namer(self.baseFilename + "." + m[0]) + if os.path.basename(dfn) == fileName: result.append(os.path.join(dirName, fileName)) break + m = self.extMatch.search(fileName, m.start() + 1) + if len(result) < self.backupCount: result = [] else: @@ -410,17 +416,14 @@ then we have to get a list of matching filenames, sort them and remove the one with the oldest suffix. 
""" - if self.stream: - self.stream.close() - self.stream = None # get the time that this sequence started at and make it a TimeTuple currentTime = int(time.time()) - dstNow = time.localtime(currentTime)[-1] t = self.rolloverAt - self.interval if self.utc: timeTuple = time.gmtime(t) else: timeTuple = time.localtime(t) + dstNow = time.localtime(currentTime)[-1] dstThen = timeTuple[-1] if dstNow != dstThen: if dstNow: @@ -431,26 +434,19 @@ dfn = self.rotation_filename(self.baseFilename + "." + time.strftime(self.suffix, timeTuple)) if os.path.exists(dfn): - os.remove(dfn) + # Already rolled over. + return + + if self.stream: + self.stream.close() + self.stream = None self.rotate(self.baseFilename, dfn) if self.backupCount > 0: for s in self.getFilesToDelete(): os.remove(s) if not self.delay: self.stream = self._open() - newRolloverAt = self.computeRollover(currentTime) - while newRolloverAt <= currentTime: - newRolloverAt = newRolloverAt + self.interval - #If DST changes and midnight or weekly rollover, adjust for this. - if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc: - dstAtRollover = time.localtime(newRolloverAt)[-1] - if dstNow != dstAtRollover: - if not dstNow: # DST kicks in before next rollover, so we need to deduct an hour - addend = -3600 - else: # DST bows out before next rollover, so we need to add an hour - addend = 3600 - newRolloverAt += addend - self.rolloverAt = newRolloverAt + self.rolloverAt = self.computeRollover(currentTime) class WatchedFileHandler(logging.FileHandler): """ diff -Nru python3.11-3.11.8/Lib/mimetypes.py python3.11-3.11.9/Lib/mimetypes.py --- python3.11-3.11.8/Lib/mimetypes.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/mimetypes.py 2024-04-02 08:25:04.000000000 +0000 @@ -120,7 +120,13 @@ but non-standard types. """ url = os.fspath(url) - scheme, url = urllib.parse._splittype(url) + p = urllib.parse.urlparse(url) + if p.scheme and len(p.scheme) > 1: + scheme = p.scheme + url = p.path + else: + scheme = None + url = os.path.splitdrive(url)[1] if scheme == 'data': # syntax of data URLs: # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data diff -Nru python3.11-3.11.8/Lib/multiprocessing/connection.py python3.11-3.11.9/Lib/multiprocessing/connection.py --- python3.11-3.11.8/Lib/multiprocessing/connection.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/multiprocessing/connection.py 2024-04-02 08:25:04.000000000 +0000 @@ -476,8 +476,9 @@ ''' if self._listener is None: raise OSError('listener is closed') + c = self._listener.accept() - if self._authkey: + if self._authkey is not None: deliver_challenge(c, self._authkey) answer_challenge(c, self._authkey) return c diff -Nru python3.11-3.11.8/Lib/os.py python3.11-3.11.9/Lib/os.py --- python3.11-3.11.8/Lib/os.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/os.py 2024-04-02 08:25:04.000000000 +0000 @@ -467,7 +467,7 @@ # lstat()/open()/fstat() trick. 
if not follow_symlinks: orig_st = stat(top, follow_symlinks=False, dir_fd=dir_fd) - topfd = open(top, O_RDONLY, dir_fd=dir_fd) + topfd = open(top, O_RDONLY | O_NONBLOCK, dir_fd=dir_fd) try: if (follow_symlinks or (st.S_ISDIR(orig_st.st_mode) and path.samestat(orig_st, stat(topfd)))): @@ -516,7 +516,7 @@ assert entries is not None name, entry = name orig_st = entry.stat(follow_symlinks=False) - dirfd = open(name, O_RDONLY, dir_fd=topfd) + dirfd = open(name, O_RDONLY | O_NONBLOCK, dir_fd=topfd) except OSError as err: if onerror is not None: onerror(err) diff -Nru python3.11-3.11.8/Lib/pdb.py python3.11-3.11.9/Lib/pdb.py --- python3.11-3.11.8/Lib/pdb.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/pdb.py 2024-04-02 08:25:04.000000000 +0000 @@ -153,6 +153,7 @@ __name__='__main__', __file__=self, __builtins__=__builtins__, + __spec__=None, ) @property @@ -294,26 +295,13 @@ # locals whenever the .f_locals accessor is called, so we # cache it here to ensure that modifications are not overwritten. self.curframe_locals = self.curframe.f_locals - return self.execRcLines() - # Can be executed earlier than 'setup' if desired - def execRcLines(self): - if not self.rcLines: - return - # local copy because of recursion - rcLines = self.rcLines - rcLines.reverse() - # execute every line only once - self.rcLines = [] - while rcLines: - line = rcLines.pop().strip() - if line and line[0] != '#': - if self.onecmd(line): - # if onecmd returns True, the command wants to exit - # from the interaction, save leftover rc lines - # to execute before next interaction - self.rcLines += reversed(rcLines) - return True + if self.rcLines: + self.cmdqueue = [ + line for line in self.rcLines + if line.strip() and not line.strip().startswith("#") + ] + self.rcLines = [] # Override Bdb methods @@ -424,12 +412,10 @@ pass else: Pdb._previous_sigint_handler = None - if self.setup(frame, traceback): - # no interaction desired at this time (happens if .pdbrc contains - # a command like "continue") - self.forget() - return - self.print_stack_entry(self.stack[self.curindex]) + self.setup(frame, traceback) + # if we have more commands to process, do not show the stack entry + if not self.cmdqueue: + self.print_stack_entry(self.stack[self.curindex]) self._cmdloop() self.forget() @@ -483,7 +469,7 @@ if marker >= 0: # queue up everything after marker next = line[marker+2:].lstrip() - self.cmdqueue.append(next) + self.cmdqueue.insert(0, next) line = line[:marker].rstrip() return line @@ -503,13 +489,12 @@ """Handles one command line during command list definition.""" cmd, arg, line = self.parseline(line) if not cmd: - return + return False if cmd == 'silent': self.commands_silent[self.commands_bnum] = True - return # continue to handle other cmd def in the cmd list + return False # continue to handle other cmd def in the cmd list elif cmd == 'end': - self.cmdqueue = [] - return 1 # end of cmd list + return True # end of cmd list cmdlist = self.commands[self.commands_bnum] if arg: cmdlist.append(cmd+' '+arg) @@ -523,9 +508,8 @@ # one of the resuming commands if func.__name__ in self.commands_resuming: self.commands_doprompt[self.commands_bnum] = False - self.cmdqueue = [] - return 1 - return + return True + return False # interface abstraction functions diff -Nru python3.11-3.11.8/Lib/pickletools.py python3.11-3.11.9/Lib/pickletools.py --- python3.11-3.11.8/Lib/pickletools.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/pickletools.py 2024-04-02 08:25:04.000000000 +0000 @@ -1253,7 +1253,7 @@ 
stack_before=[], stack_after=[pyint], proto=2, - doc="""Long integer using found-byte length. + doc="""Long integer using four-byte length. A more efficient encoding of a Python long; the long4 encoding says it all."""), diff -Nru python3.11-3.11.8/Lib/pydoc.py python3.11-3.11.9/Lib/pydoc.py --- python3.11-3.11.8/Lib/pydoc.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/pydoc.py 2024-04-02 08:25:04.000000000 +0000 @@ -204,6 +204,19 @@ name = object.__module__ + '.' + name return name +def parentname(object, modname): + """Get a name of the enclosing class (qualified it with a module name + if necessary) or module.""" + if '.' in object.__qualname__: + name = object.__qualname__.rpartition('.')[0] + if object.__module__ != modname: + return object.__module__ + '.' + name + else: + return name + else: + if object.__module__ != modname: + return object.__module__ + def isdata(object): """Check if an object is of a type that probably means it's data.""" return not (inspect.ismodule(object) or inspect.isclass(object) or @@ -298,13 +311,15 @@ return not name.startswith('_') def classify_class_attrs(object): - """Wrap inspect.classify_class_attrs, with fixup for data descriptors.""" + """Wrap inspect.classify_class_attrs, with fixup for data descriptors and bound methods.""" results = [] for (name, kind, cls, value) in inspect.classify_class_attrs(object): if inspect.isdatadescriptor(value): kind = 'data descriptor' if isinstance(value, property) and value.fset is None: kind = 'readonly property' + elif kind == 'method' and _is_bound_method(value): + kind = 'static method' results.append((name, kind, cls, value)) return results @@ -509,7 +524,7 @@ '_thread', 'zipimport') or (file.startswith(basedir) and not file.startswith(os.path.join(basedir, 'site-packages')))) and - object.__name__ not in ('xml.etree', 'test.pydoc_mod')): + object.__name__ not in ('xml.etree', 'test.test_pydoc.pydoc_mod')): if docloc.startswith(("http://", "https://")): docloc = "{}/{}.html".format(docloc.rstrip("/"), object.__name__.lower()) else: @@ -653,6 +668,25 @@ module.__name__, name, classname(object, modname)) return classname(object, modname) + def parentlink(self, object, modname): + """Make a link for the enclosing class or module.""" + link = None + name, module = object.__name__, sys.modules.get(object.__module__) + if hasattr(module, name) and getattr(module, name) is object: + if '.' 
in object.__qualname__: + name = object.__qualname__.rpartition('.')[0] + if object.__module__ != modname: + link = '%s.html#%s' % (module.__name__, name) + else: + link = '#%s' % name + else: + if object.__module__ != modname: + link = '%s.html' % module.__name__ + if link: + return '%s' % (link, parentname(object, modname)) + else: + return parentname(object, modname) + def modulelink(self, object): """Make a link for a module.""" return '%s' % (object.__name__, object.__name__) @@ -899,7 +933,7 @@ push(self.docdata(value, name, mod)) else: push(self.document(value, name, mod, - funcs, classes, mdict, object)) + funcs, classes, mdict, object, homecls)) push('\n') return attrs @@ -1022,24 +1056,44 @@ return self.grey('=' + self.repr(object)) def docroutine(self, object, name=None, mod=None, - funcs={}, classes={}, methods={}, cl=None): + funcs={}, classes={}, methods={}, cl=None, homecls=None): """Produce HTML documentation for a function or method object.""" realname = object.__name__ name = name or realname - anchor = (cl and cl.__name__ or '') + '-' + name + if homecls is None: + homecls = cl + anchor = ('' if cl is None else cl.__name__) + '-' + name note = '' - skipdocs = 0 + skipdocs = False + imfunc = None if _is_bound_method(object): - imclass = object.__self__.__class__ - if cl: - if imclass is not cl: - note = ' from ' + self.classlink(imclass, mod) + imself = object.__self__ + if imself is cl: + imfunc = getattr(object, '__func__', None) + elif inspect.isclass(imself): + note = ' class method of %s' % self.classlink(imself, mod) else: - if object.__self__ is not None: - note = ' method of %s instance' % self.classlink( - object.__self__.__class__, mod) - else: - note = ' unbound %s method' % self.classlink(imclass,mod) + note = ' method of %s instance' % self.classlink( + imself.__class__, mod) + elif (inspect.ismethoddescriptor(object) or + inspect.ismethodwrapper(object)): + try: + objclass = object.__objclass__ + except AttributeError: + pass + else: + if cl is None: + note = ' unbound %s method' % self.classlink(objclass, mod) + elif objclass is not homecls: + note = ' from ' + self.classlink(objclass, mod) + else: + imfunc = object + if inspect.isfunction(imfunc) and homecls is not None and ( + imfunc.__module__ != homecls.__module__ or + imfunc.__qualname__ != homecls.__qualname__ + '.' + realname): + pname = self.parentlink(imfunc, mod) + if pname: + note = ' from %s' % pname if (inspect.iscoroutinefunction(object) or inspect.isasyncgenfunction(object)): @@ -1050,10 +1104,13 @@ if name == realname: title = '%s' % (anchor, realname) else: - if cl and inspect.getattr_static(cl, realname, []) is object: + if (cl is not None and + inspect.getattr_static(cl, realname, []) is object): reallink = '%s' % ( cl.__name__ + '-' + realname, realname) - skipdocs = 1 + skipdocs = True + if note.startswith(' from '): + note = '' else: reallink = realname title = '%s = %s' % ( @@ -1071,7 +1128,8 @@ # XXX lambda's won't usually have func_annotations['return'] # since the syntax doesn't support but it is possible. # So removing parentheses isn't truly safe. - argspec = argspec[1:-1] # remove parentheses + if not object.__annotations__: + argspec = argspec[1:-1] # remove parentheses if not argspec: argspec = '(...)' @@ -1086,7 +1144,7 @@ doc = doc and '
<dd><span class="code">%s</span></dd>' % doc return '<dl><dt>%s</dt>%s</dl>
\n' % (decl, doc) - def docdata(self, object, name=None, mod=None, cl=None): + def docdata(self, object, name=None, mod=None, cl=None, *ignored): """Produce html documentation for a data descriptor.""" results = [] push = results.append @@ -1198,7 +1256,7 @@ entry, modname, c, prefix + ' ') return result - def docmodule(self, object, name=None, mod=None): + def docmodule(self, object, name=None, mod=None, *ignored): """Produce text documentation for a given module object.""" name = object.__name__ # ignore the passed-in name synop, desc = splitdoc(getdoc(object)) @@ -1382,7 +1440,7 @@ push(self.docdata(value, name, mod)) else: push(self.document(value, - name, mod, object)) + name, mod, object, homecls)) return attrs def spilldescriptors(msg, attrs, predicate): @@ -1457,23 +1515,43 @@ """Format an argument default value as text.""" return '=' + self.repr(object) - def docroutine(self, object, name=None, mod=None, cl=None): + def docroutine(self, object, name=None, mod=None, cl=None, homecls=None): """Produce text documentation for a function or method object.""" realname = object.__name__ name = name or realname + if homecls is None: + homecls = cl note = '' - skipdocs = 0 + skipdocs = False + imfunc = None if _is_bound_method(object): - imclass = object.__self__.__class__ - if cl: - if imclass is not cl: - note = ' from ' + classname(imclass, mod) + imself = object.__self__ + if imself is cl: + imfunc = getattr(object, '__func__', None) + elif inspect.isclass(imself): + note = ' class method of %s' % classname(imself, mod) else: - if object.__self__ is not None: - note = ' method of %s instance' % classname( - object.__self__.__class__, mod) - else: - note = ' unbound %s method' % classname(imclass,mod) + note = ' method of %s instance' % classname( + imself.__class__, mod) + elif (inspect.ismethoddescriptor(object) or + inspect.ismethodwrapper(object)): + try: + objclass = object.__objclass__ + except AttributeError: + pass + else: + if cl is None: + note = ' unbound %s method' % classname(objclass, mod) + elif objclass is not homecls: + note = ' from ' + classname(objclass, mod) + else: + imfunc = object + if inspect.isfunction(imfunc) and homecls is not None and ( + imfunc.__module__ != homecls.__module__ or + imfunc.__qualname__ != homecls.__qualname__ + '.' + realname): + pname = parentname(imfunc, mod) + if pname: + note = ' from %s' % pname if (inspect.iscoroutinefunction(object) or inspect.isasyncgenfunction(object)): @@ -1484,8 +1562,11 @@ if name == realname: title = self.bold(realname) else: - if cl and inspect.getattr_static(cl, realname, []) is object: - skipdocs = 1 + if (cl is not None and + inspect.getattr_static(cl, realname, []) is object): + skipdocs = True + if note.startswith(' from '): + note = '' title = self.bold(name) + ' = ' + realname argspec = None @@ -1501,7 +1582,8 @@ # XXX lambda's won't usually have func_annotations['return'] # since the syntax doesn't support but it is possible. # So removing parentheses isn't truly safe. 
- argspec = argspec[1:-1] # remove parentheses + if not object.__annotations__: + argspec = argspec[1:-1] # remove parentheses if not argspec: argspec = '(...)' decl = asyncqualifier + title + argspec + note @@ -1512,7 +1594,7 @@ doc = getdoc(object) or '' return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n') - def docdata(self, object, name=None, mod=None, cl=None): + def docdata(self, object, name=None, mod=None, cl=None, *ignored): """Produce text documentation for a data descriptor.""" results = [] push = results.append @@ -1528,7 +1610,8 @@ docproperty = docdata - def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None): + def docother(self, object, name=None, mod=None, parent=None, *ignored, + maxlen=None, doc=None): """Produce text documentation for a data object.""" repr = self.repr(object) if maxlen: @@ -2408,6 +2491,7 @@ threading.Thread.__init__(self) self.serving = False self.error = None + self.docserver = None def run(self): """Start the server.""" @@ -2440,9 +2524,9 @@ thread = ServerThread(urlhandler, hostname, port) thread.start() - # Wait until thread.serving is True to make sure we are - # really up before returning. - while not thread.error and not thread.serving: + # Wait until thread.serving is True and thread.docserver is set + # to make sure we are really up before returning. + while not thread.error and not (thread.serving and thread.docserver): time.sleep(.01) return thread diff -Nru python3.11-3.11.8/Lib/pydoc_data/topics.py python3.11-3.11.9/Lib/pydoc_data/topics.py --- python3.11-3.11.8/Lib/pydoc_data/topics.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/pydoc_data/topics.py 2024-04-02 08:25:04.000000000 +0000 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Feb 6 21:21:08 2024 +# Autogenerated by Sphinx on Tue Apr 2 09:24:48 2024 # as part of the release process. topics = {'assert': 'The "assert" statement\n' '**********************\n' @@ -792,9 +792,9 @@ '\n' 'object.__dir__(self)\n' '\n' - ' Called when "dir()" is called on the object. A ' - 'sequence must be\n' - ' returned. "dir()" converts the returned sequence to a ' + ' Called when "dir()" is called on the object. An ' + 'iterable must be\n' + ' returned. "dir()" converts the returned iterable to a ' 'list and\n' ' sorts it.\n' '\n' @@ -821,8 +821,8 @@ 'returned.\n' '\n' 'The "__dir__" function should accept no arguments, and ' - 'return a\n' - 'sequence of strings that represents the names accessible ' + 'return an\n' + 'iterable of strings that represents the names accessible ' 'on module. If\n' 'present, this function overrides the standard "dir()" ' 'search on a\n' @@ -4492,7 +4492,7 @@ 'reflection,\n' ' and "__eq__()" and "__ne__()" are their own reflection. ' 'If the\n' - ' operands are of different types, and right operand’s ' + ' operands are of different types, and the right operand’s ' 'type is a\n' ' direct or indirect subclass of the left operand’s type, ' 'the\n' @@ -4502,6 +4502,11 @@ 'is not\n' ' considered.\n' '\n' + ' When no appropriate method returns any value other than\n' + ' "NotImplemented", the "==" and "!=" operators will fall ' + 'back to\n' + ' "is" and "is not", respectively.\n' + '\n' 'object.__hash__(self)\n' '\n' ' Called by built-in function "hash()" and for operations ' @@ -4957,22 +4962,23 @@ 'the\n' 'current directory, it is read with "\'utf-8\'" encoding and ' 'executed as\n' - 'if it had been typed at the debugger prompt. This is ' - 'particularly\n' - 'useful for aliases. 
If both files exist, the one in the home\n' - 'directory is read first and aliases defined there can be ' - 'overridden by\n' - 'the local file.\n' - '\n' - 'Changed in version 3.11: ".pdbrc" is now read with "\'utf-8\'" ' - 'encoding.\n' - 'Previously, it was read with the system locale encoding.\n' + 'if it had been typed at the debugger prompt, with the exception ' + 'that\n' + 'empty lines and lines starting with "#" are ignored. This is\n' + 'particularly useful for aliases. If both files exist, the one ' + 'in the\n' + 'home directory is read first and aliases defined there can be\n' + 'overridden by the local file.\n' '\n' 'Changed in version 3.2: ".pdbrc" can now contain commands that\n' 'continue debugging, such as "continue" or "next". Previously, ' 'these\n' 'commands had no effect.\n' '\n' + 'Changed in version 3.11: ".pdbrc" is now read with "\'utf-8\'" ' + 'encoding.\n' + 'Previously, it was read with the system locale encoding.\n' + '\n' 'h(elp) [command]\n' '\n' ' Without argument, print the list of available commands. With ' @@ -8110,7 +8116,7 @@ '"__rsub__()"\n' ' method, "type(y).__rsub__(y, x)" is called if ' '"type(x).__sub__(x,\n' - ' y)" returns *NotImplemented*.\n' + ' y)" returns "NotImplemented".\n' '\n' ' Note that ternary "pow()" will not try calling ' '"__rpow__()" (the\n' @@ -8153,14 +8159,18 @@ 'the result\n' ' (which could be, but does not have to be, *self*). If a ' 'specific\n' - ' method is not defined, the augmented assignment falls ' - 'back to the\n' - ' normal methods. For instance, if *x* is an instance of ' - 'a class\n' - ' with an "__iadd__()" method, "x += y" is equivalent to ' - '"x =\n' - ' x.__iadd__(y)" . Otherwise, "x.__add__(y)" and ' - '"y.__radd__(x)" are\n' + ' method is not defined, or if that method returns ' + '"NotImplemented",\n' + ' the augmented assignment falls back to the normal ' + 'methods. For\n' + ' instance, if *x* is an instance of a class with an ' + '"__iadd__()"\n' + ' method, "x += y" is equivalent to "x = x.__iadd__(y)" . ' + 'If\n' + ' "__iadd__()" does not exist, or if "x.__iadd__(y)" ' + 'returns\n' + ' "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" ' + 'are\n' ' considered, as with the evaluation of "x + y". In ' 'certain\n' ' situations, augmented assignment can result in ' @@ -8241,7 +8251,7 @@ 'Every object has an identity, a type and a value. An object’s\n' '*identity* never changes once it has been created; you may think ' 'of it\n' - 'as the object’s address in memory. The ‘"is"’ operator compares ' + 'as the object’s address in memory. The "is" operator compares ' 'the\n' 'identity of two objects; the "id()" function returns an integer\n' 'representing its identity.\n' @@ -8306,7 +8316,7 @@ 'Note that the use of the implementation’s tracing or debugging\n' 'facilities may keep objects alive that would normally be ' 'collectable.\n' - 'Also note that catching an exception with a ‘"try"…"except"’ ' + 'Also note that catching an exception with a "try"…"except" ' 'statement\n' 'may keep objects alive.\n' '\n' @@ -8321,8 +8331,9 @@ 'release the external resource, usually a "close()" method. ' 'Programs\n' 'are strongly recommended to explicitly close such objects. 
The\n' - '‘"try"…"finally"’ statement and the ‘"with"’ statement provide\n' - 'convenient ways to do this.\n' + '"try"…"finally" statement and the "with" statement provide ' + 'convenient\n' + 'ways to do this.\n' '\n' 'Some objects contain references to other objects; these are ' 'called\n' @@ -8699,10 +8710,7 @@ 'The try statement.\n' '\n' 'Changed in version 3.3: "None" is now permitted as "Y" in "raise X\n' - 'from Y".\n' - '\n' - 'New in version 3.3: The "__suppress_context__" attribute to ' - 'suppress\n' + 'from Y".Added the "__suppress_context__" attribute to suppress\n' 'automatic display of the exception context.\n' '\n' 'Changed in version 3.11: If the traceback of the active exception ' @@ -9479,8 +9487,8 @@ 'reflection,\n' ' and "__eq__()" and "__ne__()" are their own reflection. ' 'If the\n' - ' operands are of different types, and right operand’s type ' - 'is a\n' + ' operands are of different types, and the right operand’s ' + 'type is a\n' ' direct or indirect subclass of the left operand’s type, ' 'the\n' ' reflected method of the right operand has priority, ' @@ -9489,6 +9497,11 @@ 'is not\n' ' considered.\n' '\n' + ' When no appropriate method returns any value other than\n' + ' "NotImplemented", the "==" and "!=" operators will fall ' + 'back to\n' + ' "is" and "is not", respectively.\n' + '\n' 'object.__hash__(self)\n' '\n' ' Called by built-in function "hash()" and for operations ' @@ -9730,9 +9743,9 @@ '\n' 'object.__dir__(self)\n' '\n' - ' Called when "dir()" is called on the object. A sequence ' + ' Called when "dir()" is called on the object. An iterable ' 'must be\n' - ' returned. "dir()" converts the returned sequence to a ' + ' returned. "dir()" converts the returned iterable to a ' 'list and\n' ' sorts it.\n' '\n' @@ -9759,8 +9772,8 @@ 'returned.\n' '\n' 'The "__dir__" function should accept no arguments, and ' - 'return a\n' - 'sequence of strings that represents the names accessible on ' + 'return an\n' + 'iterable of strings that represents the names accessible on ' 'module. If\n' 'present, this function overrides the standard "dir()" search ' 'on a\n' @@ -11023,7 +11036,7 @@ '"__rsub__()"\n' ' method, "type(y).__rsub__(y, x)" is called if ' '"type(x).__sub__(x,\n' - ' y)" returns *NotImplemented*.\n' + ' y)" returns "NotImplemented".\n' '\n' ' Note that ternary "pow()" will not try calling ' '"__rpow__()" (the\n' @@ -11066,14 +11079,17 @@ 'the result\n' ' (which could be, but does not have to be, *self*). If a ' 'specific\n' - ' method is not defined, the augmented assignment falls ' - 'back to the\n' - ' normal methods. For instance, if *x* is an instance of a ' - 'class\n' - ' with an "__iadd__()" method, "x += y" is equivalent to "x ' - '=\n' - ' x.__iadd__(y)" . Otherwise, "x.__add__(y)" and ' - '"y.__radd__(x)" are\n' + ' method is not defined, or if that method returns ' + '"NotImplemented",\n' + ' the augmented assignment falls back to the normal ' + 'methods. For\n' + ' instance, if *x* is an instance of a class with an ' + '"__iadd__()"\n' + ' method, "x += y" is equivalent to "x = x.__iadd__(y)" . ' + 'If\n' + ' "__iadd__()" does not exist, or if "x.__iadd__(y)" ' + 'returns\n' + ' "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" are\n' ' considered, as with the evaluation of "x + y". 
In ' 'certain\n' ' situations, augmented assignment can result in unexpected ' @@ -12356,9 +12372,8 @@ '\n' 'New in version 3.3: The "\'rb\'" prefix of raw bytes literals has ' 'been\n' - 'added as a synonym of "\'br\'".\n' - '\n' - 'New in version 3.3: Support for the unicode legacy literal\n' + 'added as a synonym of "\'br\'".Support for the unicode legacy ' + 'literal\n' '("u\'value\'") was reintroduced to simplify the maintenance of ' 'dual\n' 'Python 2.x and 3.x codebases. See **PEP 414** for more ' @@ -13065,14 +13080,18 @@ 'contains\n' 'the numbers 0, 1, …, *n*-1. Item *i* of sequence *a* is selected ' 'by\n' - '"a[i]".\n' + '"a[i]". Some sequences, including built-in sequences, interpret\n' + 'negative subscripts by adding the sequence length. For example,\n' + '"a[-2]" equals "a[n-2]", the second to last item of sequence a ' + 'with\n' + 'length "n".\n' '\n' 'Sequences also support slicing: "a[i:j]" selects all items with ' 'index\n' '*k* such that *i* "<=" *k* "<" *j*. When used as an expression, a\n' - 'slice is a sequence of the same type. This implies that the index ' - 'set\n' - 'is renumbered so that it starts at 0.\n' + 'slice is a sequence of the same type. The comment above about ' + 'negative\n' + 'indexes also applies to negative slice positions.\n' '\n' 'Some sequences also support “extended slicing” with a third “step”\n' 'parameter: "a[i:j:k]" selects all items of *a* with index *x* where ' @@ -13809,7 +13828,9 @@ 'name |\n' '+----------------------------------------------------+----------------------------------------------------+\n' '| codeobject.co_qualname | The fully ' - 'qualified function name |\n' + 'qualified function name New in version |\n' + '| | ' + '3.11. |\n' '+----------------------------------------------------+----------------------------------------------------+\n' '| codeobject.co_argcount | The total ' 'number of positional *parameters* |\n' @@ -14025,6 +14046,14 @@ 'tools.\n' ' The PEP that introduced the "co_lines()" method.\n' '\n' + 'codeobject.replace(**kwargs)\n' + '\n' + ' Return a copy of the code object with new values for the ' + 'specified\n' + ' fields.\n' + '\n' + ' New in version 3.8.\n' + '\n' '\n' 'Frame objects\n' '-------------\n' @@ -15570,9 +15599,8 @@ 'objects\n' 'based on the sequence of values they define (instead of ' 'comparing\n' - 'based on object identity).\n' - '\n' - 'New in version 3.3: The "start", "stop" and "step" attributes.\n' + 'based on object identity).Added the "start", "stop" and "step"\n' + 'attributes.\n' '\n' 'See also:\n' '\n' diff -Nru python3.11-3.11.8/Lib/shutil.py python3.11-3.11.9/Lib/shutil.py --- python3.11-3.11.8/Lib/shutil.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/shutil.py 2024-04-02 08:25:04.000000000 +0000 @@ -662,7 +662,7 @@ continue if is_dir: try: - dirfd = os.open(entry.name, os.O_RDONLY, dir_fd=topfd) + dirfd = os.open(entry.name, os.O_RDONLY | os.O_NONBLOCK, dir_fd=topfd) dirfd_closed = False except OSError: onerror(os.open, fullname, sys.exc_info()) @@ -742,7 +742,7 @@ onerror(os.lstat, path, sys.exc_info()) return try: - fd = os.open(path, os.O_RDONLY, dir_fd=dir_fd) + fd = os.open(path, os.O_RDONLY | os.O_NONBLOCK, dir_fd=dir_fd) fd_closed = False except Exception: onerror(os.open, path, sys.exc_info()) diff -Nru python3.11-3.11.8/Lib/subprocess.py python3.11-3.11.9/Lib/subprocess.py --- python3.11-3.11.8/Lib/subprocess.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/subprocess.py 2024-04-02 08:25:04.000000000 +0000 @@ -1581,6 +1581,8 @@ 
"""Internal implementation of wait() on Windows.""" if timeout is None: timeout_millis = _winapi.INFINITE + elif timeout <= 0: + timeout_millis = 0 else: timeout_millis = int(timeout * 1000) if self.returncode is None: diff -Nru python3.11-3.11.8/Lib/test/_test_multiprocessing.py python3.11-3.11.9/Lib/test/_test_multiprocessing.py --- python3.11-3.11.8/Lib/test/_test_multiprocessing.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/_test_multiprocessing.py 2024-04-02 08:25:04.000000000 +0000 @@ -3465,6 +3465,30 @@ if self.TYPE == 'processes': self.assertRaises(OSError, l.accept) + def test_empty_authkey(self): + # bpo-43952: allow empty bytes as authkey + def handler(*args): + raise RuntimeError('Connection took too long...') + + def run(addr, authkey): + client = self.connection.Client(addr, authkey=authkey) + client.send(1729) + + key = b'' + + with self.connection.Listener(authkey=key) as listener: + thread = threading.Thread(target=run, args=(listener.address, key)) + thread.start() + try: + with listener.accept() as d: + self.assertEqual(d.recv(), 1729) + finally: + thread.join() + + if self.TYPE == 'processes': + with self.assertRaises(OSError): + listener.accept() + @unittest.skipUnless(util.abstract_sockets_supported, "test needs abstract socket support") def test_abstract_socket(self): @@ -3932,6 +3956,21 @@ # test_multiprocessing_spawn, etc) in parallel. return prefix + str(os.getpid()) + def test_shared_memory_name_with_embedded_null(self): + name_tsmb = self._new_shm_name('test01_null') + sms = shared_memory.SharedMemory(name_tsmb, create=True, size=512) + self.addCleanup(sms.unlink) + with self.assertRaises(ValueError): + shared_memory.SharedMemory(name_tsmb + '\0a', create=False, size=512) + if shared_memory._USE_POSIX: + orig_name = sms._name + try: + sms._name = orig_name + '\0a' + with self.assertRaises(ValueError): + sms.unlink() + finally: + sms._name = orig_name + def test_shared_memory_basics(self): name_tsmb = self._new_shm_name('test01_tsmb') sms = shared_memory.SharedMemory(name_tsmb, create=True, size=512) @@ -4066,7 +4105,7 @@ self.addCleanup(shm2.unlink) self.assertEqual(shm2._name, names[1]) - def test_invalid_shared_memory_cration(self): + def test_invalid_shared_memory_creation(self): # Test creating a shared memory segment with negative size with self.assertRaises(ValueError): sms_invalid = shared_memory.SharedMemory(create=True, size=-1) diff -Nru python3.11-3.11.8/Lib/test/bisect_cmd.py python3.11-3.11.9/Lib/test/bisect_cmd.py --- python3.11-3.11.8/Lib/test/bisect_cmd.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/bisect_cmd.py 2024-04-02 08:25:04.000000000 +0000 @@ -51,6 +51,7 @@ cmd = [sys.executable] cmd.extend(subprocess._args_from_interpreter_flags()) cmd.extend(subprocess._optim_args_from_interpreter_flags()) + cmd.extend(('-X', 'faulthandler')) return cmd @@ -77,9 +78,13 @@ write_tests(tmp, tests) cmd = python_cmd() - cmd.extend(['-m', 'test', '--matchfile', tmp]) + cmd.extend(['-u', '-m', 'test', '--matchfile', tmp]) cmd.extend(args.test_args) print("+ %s" % format_shell_args(cmd)) + + sys.stdout.flush() + sys.stderr.flush() + proc = subprocess.run(cmd) return proc.returncode finally: @@ -136,8 +141,8 @@ ntest = max(ntest // 2, 1) subtests = random.sample(tests, ntest) - print("[+] Iteration %s: run %s tests/%s" - % (iteration, len(subtests), len(tests))) + print(f"[+] Iteration {iteration}/{args.max_iter}: " + f"run {len(subtests)} tests/{len(tests)}") print() exitcode = run_tests(args, subtests) @@ 
-169,10 +174,10 @@ if len(tests) <= args.max_tests: print("Bisection completed in %s iterations and %s" % (iteration, datetime.timedelta(seconds=dt))) - sys.exit(1) else: print("Bisection failed after %s iterations and %s" % (iteration, datetime.timedelta(seconds=dt))) + sys.exit(1) if __name__ == "__main__": diff -Nru python3.11-3.11.8/Lib/test/libregrtest/cmdline.py python3.11-3.11.9/Lib/test/libregrtest/cmdline.py --- python3.11-3.11.8/Lib/test/libregrtest/cmdline.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/cmdline.py 2024-04-02 08:25:04.000000000 +0000 @@ -172,6 +172,7 @@ self.fail_rerun = False self.tempdir = None self._add_python_opts = True + self.xmlpath = None super().__init__(**kwargs) @@ -347,6 +348,8 @@ help='override the working directory for the test run') group.add_argument('--cleanup', action='store_true', help='remove old test_python_* directories') + group.add_argument('--bisect', action='store_true', + help='if some tests fail, run test.bisect_cmd on them') group.add_argument('--dont-add-python-opts', dest='_add_python_opts', action='store_false', help="internal option, don't use it") @@ -494,17 +497,28 @@ ns.randomize = True if ns.verbose: ns.header = True + # When -jN option is used, a worker process does not use --verbose3 # and so -R 3:3 -jN --verbose3 just works as expected: there is no false # alarm about memory leak. if ns.huntrleaks and ns.verbose3 and ns.use_mp is None: - ns.verbose3 = False # run_single_test() replaces sys.stdout with io.StringIO if verbose3 # is true. In this case, huntrleaks sees an write into StringIO as # a memory leak, whereas it is not (gh-71290). + ns.verbose3 = False print("WARNING: Disable --verbose3 because it's incompatible with " "--huntrleaks without -jN option", file=sys.stderr) + + if ns.huntrleaks and ns.xmlpath: + # The XML data is written into a file outside runtest_refleak(), so + # it looks like a leak but it's not. Simply disable XML output when + # hunting for reference leaks (gh-83434). + ns.xmlpath = None + print("WARNING: Disable --junit-xml because it's incompatible " + "with --huntrleaks", + file=sys.stderr) + if ns.forever: # --forever implies --failfast ns.failfast = True diff -Nru python3.11-3.11.8/Lib/test/libregrtest/filter.py python3.11-3.11.9/Lib/test/libregrtest/filter.py --- python3.11-3.11.8/Lib/test/libregrtest/filter.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/filter.py 2024-04-02 08:25:04.000000000 +0000 @@ -27,6 +27,11 @@ return ('.' 
in pattern) and (not re.search(r'[?*\[\]]', pattern)) +def get_match_tests(): + global _test_patterns + return _test_patterns + + def set_match_tests(patterns): global _test_matchers, _test_patterns diff -Nru python3.11-3.11.8/Lib/test/libregrtest/findtests.py python3.11-3.11.9/Lib/test/libregrtest/findtests.py --- python3.11-3.11.8/Lib/test/libregrtest/findtests.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/findtests.py 2024-04-02 08:25:04.000000000 +0000 @@ -23,6 +23,7 @@ "test_future_stmt", "test_gdb", "test_inspect", + "test_pydoc", "test_multiprocessing_fork", "test_multiprocessing_forkserver", "test_multiprocessing_spawn", diff -Nru python3.11-3.11.8/Lib/test/libregrtest/main.py python3.11-3.11.9/Lib/test/libregrtest/main.py --- python3.11-3.11.8/Lib/test/libregrtest/main.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/main.py 2024-04-02 08:25:04.000000000 +0000 @@ -6,8 +6,7 @@ import sysconfig import time -from test import support -from test.support import os_helper, MS_WINDOWS +from test.support import os_helper, MS_WINDOWS, flush_std_streams from .cmdline import _parse_args, Namespace from .findtests import findtests, split_test_packages, list_cases @@ -72,6 +71,7 @@ self.want_cleanup: bool = ns.cleanup self.want_rerun: bool = ns.rerun self.want_run_leaks: bool = ns.runleaks + self.want_bisect: bool = ns.bisect self.ci_mode: bool = (ns.fast_ci or ns.slow_ci) self.want_add_python_opts: bool = (_add_python_opts @@ -272,6 +272,55 @@ self.display_result(rerun_runtests) + def _run_bisect(self, runtests: RunTests, test: str, progress: str) -> bool: + print() + title = f"Bisect {test}" + if progress: + title = f"{title} ({progress})" + print(title) + print("#" * len(title)) + print() + + cmd = runtests.create_python_cmd() + cmd.extend([ + "-u", "-m", "test.bisect_cmd", + # Limit to 25 iterations (instead of 100) to not abuse CI resources + "--max-iter", "25", + "-v", + # runtests.match_tests is not used (yet) for bisect_cmd -i arg + ]) + cmd.extend(runtests.bisect_cmd_args()) + cmd.append(test) + print("+", shlex.join(cmd), flush=True) + + flush_std_streams() + + import subprocess + proc = subprocess.run(cmd, timeout=runtests.timeout) + exitcode = proc.returncode + + title = f"{title}: exit code {exitcode}" + print(title) + print("#" * len(title)) + print(flush=True) + + if exitcode: + print(f"Bisect failed with exit code {exitcode}") + return False + + return True + + def run_bisect(self, runtests: RunTests) -> None: + tests, _ = self.results.prepare_rerun(clear=False) + + for index, name in enumerate(tests, 1): + if len(tests) > 1: + progress = f"{index}/{len(tests)}" + else: + progress = "" + if not self._run_bisect(runtests, name, progress): + return + def display_result(self, runtests): # If running the test suite for PGO then no one cares about results. 
if runtests.pgo: @@ -453,7 +502,7 @@ setup_process() - if self.hunt_refleak and not self.num_workers: + if (runtests.hunt_refleak is not None) and (not self.num_workers): # gh-109739: WindowsLoadTracker thread interfers with refleak check use_load_tracker = False else: @@ -473,6 +522,9 @@ if self.want_rerun and self.results.need_rerun(): self.rerun_failed_tests(runtests) + + if self.want_bisect and self.results.need_rerun(): + self.run_bisect(runtests) finally: if use_load_tracker: self.logger.stop_load_tracker() diff -Nru python3.11-3.11.8/Lib/test/libregrtest/refleak.py python3.11-3.11.9/Lib/test/libregrtest/refleak.py --- python3.11-3.11.8/Lib/test/libregrtest/refleak.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/refleak.py 2024-04-02 08:25:04.000000000 +0000 @@ -86,9 +86,12 @@ rc_before = alloc_before = fd_before = 0 if not quiet: - print("beginning", repcount, "repetitions", file=sys.stderr) - print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr, - flush=True) + print("beginning", repcount, "repetitions. Showing number of leaks " + "(. for 0 or less, X for 10 or more)", + file=sys.stderr) + numbers = ("1234567890"*(repcount//10 + 1))[:repcount] + numbers = numbers[:warmups] + ':' + numbers[warmups:] + print(numbers, file=sys.stderr, flush=True) results = None dash_R_cleanup(fs, ps, pic, zdc, abcs) @@ -105,13 +108,27 @@ rc_after = gettotalrefcount() fd_after = fd_count() - if not quiet: - print('.', end='', file=sys.stderr, flush=True) - rc_deltas[i] = get_pooled_int(rc_after - rc_before) alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before) fd_deltas[i] = get_pooled_int(fd_after - fd_before) + if not quiet: + # use max, not sum, so total_leaks is one of the pooled ints + total_leaks = max(rc_deltas[i], alloc_deltas[i], fd_deltas[i]) + if total_leaks <= 0: + symbol = '.' 
+ elif total_leaks < 10: + symbol = ( + '.', '1', '2', '3', '4', '5', '6', '7', '8', '9', + )[total_leaks] + else: + symbol = 'X' + if i == warmups: + print(' ', end='', file=sys.stderr, flush=True) + print(symbol, end='', file=sys.stderr, flush=True) + del total_leaks + del symbol + alloc_before = alloc_after rc_before = rc_after fd_before = fd_after @@ -146,14 +163,20 @@ ]: # ignore warmup runs deltas = deltas[warmups:] - if checker(deltas): + failing = checker(deltas) + suspicious = any(deltas) + if failing or suspicious: msg = '%s leaked %s %s, sum=%s' % ( test_name, deltas, item_name, sum(deltas)) - print(msg, file=sys.stderr, flush=True) - with open(filename, "a", encoding="utf-8") as refrep: - print(msg, file=refrep) - refrep.flush() - failed = True + print(msg, end='', file=sys.stderr) + if failing: + print(file=sys.stderr, flush=True) + with open(filename, "a", encoding="utf-8") as refrep: + print(msg, file=refrep) + refrep.flush() + failed = True + else: + print(' (this is fine)', file=sys.stderr, flush=True) return (failed, results) diff -Nru python3.11-3.11.8/Lib/test/libregrtest/results.py python3.11-3.11.9/Lib/test/libregrtest/results.py --- python3.11-3.11.8/Lib/test/libregrtest/results.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/results.py 2024-04-02 08:25:04.000000000 +0000 @@ -129,7 +129,7 @@ def need_rerun(self): return bool(self.rerun_results) - def prepare_rerun(self) -> tuple[TestTuple, FilterDict]: + def prepare_rerun(self, *, clear: bool = True) -> tuple[TestTuple, FilterDict]: tests: TestList = [] match_tests_dict = {} for result in self.rerun_results: @@ -140,11 +140,12 @@ if match_tests: match_tests_dict[result.test_name] = match_tests - # Clear previously failed tests - self.rerun_bad.extend(self.bad) - self.bad.clear() - self.env_changed.clear() - self.rerun_results.clear() + if clear: + # Clear previously failed tests + self.rerun_bad.extend(self.bad) + self.bad.clear() + self.env_changed.clear() + self.rerun_results.clear() return (tuple(tests), match_tests_dict) diff -Nru python3.11-3.11.8/Lib/test/libregrtest/run_workers.py python3.11-3.11.9/Lib/test/libregrtest/run_workers.py --- python3.11-3.11.8/Lib/test/libregrtest/run_workers.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/run_workers.py 2024-04-02 08:25:04.000000000 +0000 @@ -209,7 +209,7 @@ self._popen = None def create_stdout(self, stack: contextlib.ExitStack) -> TextIO: - """Create stdout temporay file (file descriptor).""" + """Create stdout temporary file (file descriptor).""" if MS_WINDOWS: # gh-95027: When stdout is not a TTY, Python uses the ANSI code diff -Nru python3.11-3.11.8/Lib/test/libregrtest/runtests.py python3.11-3.11.9/Lib/test/libregrtest/runtests.py --- python3.11-3.11.8/Lib/test/libregrtest/runtests.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/runtests.py 2024-04-02 08:25:04.000000000 +0000 @@ -2,7 +2,9 @@ import dataclasses import json import os +import shlex import subprocess +import sys from typing import Any from test import support @@ -67,6 +69,11 @@ runs: int filename: StrPath + def bisect_cmd_args(self) -> list[str]: + # Ignore filename since it can contain colon (":"), + # and usually it's not used. Use the default filename. 
+ return ["-R", f"{self.warmups}:{self.runs}:"] + @dataclasses.dataclass(slots=True, frozen=True) class RunTests: @@ -136,6 +143,47 @@ or support.is_wasi ) + def create_python_cmd(self) -> list[str]: + python_opts = support.args_from_interpreter_flags() + if self.python_cmd is not None: + executable = self.python_cmd + # Remove -E option, since --python=COMMAND can set PYTHON + # environment variables, such as PYTHONPATH, in the worker + # process. + python_opts = [opt for opt in python_opts if opt != "-E"] + else: + executable = (sys.executable,) + cmd = [*executable, *python_opts] + if '-u' not in python_opts: + cmd.append('-u') # Unbuffered stdout and stderr + return cmd + + def bisect_cmd_args(self) -> list[str]: + args = [] + if self.fail_fast: + args.append("--failfast") + if self.fail_env_changed: + args.append("--fail-env-changed") + if self.timeout: + args.append(f"--timeout={self.timeout}") + if self.hunt_refleak is not None: + args.extend(self.hunt_refleak.bisect_cmd_args()) + if self.test_dir: + args.extend(("--testdir", self.test_dir)) + if self.memory_limit: + args.extend(("--memlimit", self.memory_limit)) + if self.gc_threshold: + args.append(f"--threshold={self.gc_threshold}") + if self.use_resources: + args.extend(("-u", ','.join(self.use_resources))) + if self.python_cmd: + cmd = shlex.join(self.python_cmd) + args.extend(("--python", cmd)) + if self.randomize: + args.append(f"--randomize") + args.append(f"--randseed={self.random_seed}") + return args + @dataclasses.dataclass(slots=True, frozen=True) class WorkerRunTests(RunTests): diff -Nru python3.11-3.11.8/Lib/test/libregrtest/utils.py python3.11-3.11.9/Lib/test/libregrtest/utils.py --- python3.11-3.11.8/Lib/test/libregrtest/utils.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/utils.py 2024-04-02 08:25:04.000000000 +0000 @@ -414,7 +414,7 @@ # the tests. The name of the dir includes the pid to allow parallel # testing (see the -j option). # Emscripten and WASI have stubbed getpid(), Emscripten has only - # milisecond clock resolution. Use randint() instead. + # millisecond clock resolution. Use randint() instead. 
if support.is_emscripten or support.is_wasi: nounce = random.randint(0, 1_000_000) else: diff -Nru python3.11-3.11.8/Lib/test/libregrtest/win_utils.py python3.11-3.11.9/Lib/test/libregrtest/win_utils.py --- python3.11-3.11.8/Lib/test/libregrtest/win_utils.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/win_utils.py 2024-04-02 08:25:04.000000000 +0000 @@ -24,6 +24,10 @@ """ def __init__(self): + # make __del__ not fail if pre-flight test fails + self._running = None + self._stopped = None + # Pre-flight test for access to the performance data; # `PermissionError` will be raised if not allowed winreg.QueryInfoKey(winreg.HKEY_PERFORMANCE_DATA) diff -Nru python3.11-3.11.8/Lib/test/libregrtest/worker.py python3.11-3.11.9/Lib/test/libregrtest/worker.py --- python3.11-3.11.8/Lib/test/libregrtest/worker.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/libregrtest/worker.py 2024-04-02 08:25:04.000000000 +0000 @@ -3,7 +3,6 @@ import os from typing import Any, NoReturn -from test import support from test.support import os_helper from .setup import setup_process, setup_test_dir @@ -19,21 +18,10 @@ def create_worker_process(runtests: WorkerRunTests, output_fd: int, tmp_dir: StrPath | None = None) -> subprocess.Popen: - python_cmd = runtests.python_cmd worker_json = runtests.as_json() - python_opts = support.args_from_interpreter_flags() - if python_cmd is not None: - executable = python_cmd - # Remove -E option, since --python=COMMAND can set PYTHON environment - # variables, such as PYTHONPATH, in the worker process. - python_opts = [opt for opt in python_opts if opt != "-E"] - else: - executable = (sys.executable,) - cmd = [*executable, *python_opts, - '-u', # Unbuffered stdout and stderr - '-m', 'test.libregrtest.worker', - worker_json] + cmd = runtests.create_python_cmd() + cmd.extend(['-m', 'test.libregrtest.worker', worker_json]) env = dict(os.environ) if tmp_dir is not None: diff -Nru python3.11-3.11.8/Lib/test/pydoc_mod.py python3.11-3.11.9/Lib/test/pydoc_mod.py --- python3.11-3.11.8/Lib/test/pydoc_mod.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/pydoc_mod.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -"""This is a test module for test_pydoc""" - -from __future__ import print_function - -import types -import typing - -__author__ = "Benjamin Peterson" -__credits__ = "Nobody" -__version__ = "1.2.3.4" -__xyz__ = "X, Y and Z" - -class A: - """Hello and goodbye""" - def __init__(): - """Wow, I have no function!""" - pass - -class B(object): - NO_MEANING: str = "eggs" - pass - -class C(object): - def say_no(self): - return "no" - def get_answer(self): - """ Return say_no() """ - return self.say_no() - def is_it_true(self): - """ Return self.get_answer() """ - return self.get_answer() - def __class_getitem__(self, item): - return types.GenericAlias(self, item) - -def doc_func(): - """ - This function solves all of the world's problems: - hunger - lack of Python - war - """ - -def nodoc_func(): - pass - - -list_alias1 = typing.List[int] -list_alias2 = list[int] -c_alias = C[int] -type_union1 = typing.Union[int, str] -type_union2 = int | str diff -Nru python3.11-3.11.8/Lib/test/pydocfodder.py python3.11-3.11.9/Lib/test/pydocfodder.py --- python3.11-3.11.8/Lib/test/pydocfodder.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/pydocfodder.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,216 +0,0 @@ -"""Something just to look at via pydoc.""" - -import types - -class A_classic: - "A classic class." 
- def A_method(self): - "Method defined in A." - def AB_method(self): - "Method defined in A and B." - def AC_method(self): - "Method defined in A and C." - def AD_method(self): - "Method defined in A and D." - def ABC_method(self): - "Method defined in A, B and C." - def ABD_method(self): - "Method defined in A, B and D." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - - -class B_classic(A_classic): - "A classic class, derived from A_classic." - def AB_method(self): - "Method defined in A and B." - def ABC_method(self): - "Method defined in A, B and C." - def ABD_method(self): - "Method defined in A, B and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def B_method(self): - "Method defined in B." - def BC_method(self): - "Method defined in B and C." - def BD_method(self): - "Method defined in B and D." - def BCD_method(self): - "Method defined in B, C and D." - -class C_classic(A_classic): - "A classic class, derived from A_classic." - def AC_method(self): - "Method defined in A and C." - def ABC_method(self): - "Method defined in A, B and C." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def BC_method(self): - "Method defined in B and C." - def BCD_method(self): - "Method defined in B, C and D." - def C_method(self): - "Method defined in C." - def CD_method(self): - "Method defined in C and D." - -class D_classic(B_classic, C_classic): - "A classic class, derived from B_classic and C_classic." - def AD_method(self): - "Method defined in A and D." - def ABD_method(self): - "Method defined in A, B and D." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def BD_method(self): - "Method defined in B and D." - def BCD_method(self): - "Method defined in B, C and D." - def CD_method(self): - "Method defined in C and D." - def D_method(self): - "Method defined in D." - - -class A_new(object): - "A new-style class." - - def A_method(self): - "Method defined in A." - def AB_method(self): - "Method defined in A and B." - def AC_method(self): - "Method defined in A and C." - def AD_method(self): - "Method defined in A and D." - def ABC_method(self): - "Method defined in A, B and C." - def ABD_method(self): - "Method defined in A, B and D." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - - def A_classmethod(cls, x): - "A class method defined in A." - A_classmethod = classmethod(A_classmethod) - - def A_staticmethod(): - "A static method defined in A." - A_staticmethod = staticmethod(A_staticmethod) - - def _getx(self): - "A property getter function." - def _setx(self, value): - "A property setter function." - def _delx(self): - "A property deleter function." - A_property = property(fdel=_delx, fget=_getx, fset=_setx, - doc="A sample property defined in A.") - - A_int_alias = int - -class B_new(A_new): - "A new-style class, derived from A_new." - - def AB_method(self): - "Method defined in A and B." - def ABC_method(self): - "Method defined in A, B and C." - def ABD_method(self): - "Method defined in A, B and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def B_method(self): - "Method defined in B." - def BC_method(self): - "Method defined in B and C." - def BD_method(self): - "Method defined in B and D." - def BCD_method(self): - "Method defined in B, C and D." 
- -class C_new(A_new): - "A new-style class, derived from A_new." - - def AC_method(self): - "Method defined in A and C." - def ABC_method(self): - "Method defined in A, B and C." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def BC_method(self): - "Method defined in B and C." - def BCD_method(self): - "Method defined in B, C and D." - def C_method(self): - "Method defined in C." - def CD_method(self): - "Method defined in C and D." - -class D_new(B_new, C_new): - """A new-style class, derived from B_new and C_new. - """ - - def AD_method(self): - "Method defined in A and D." - def ABD_method(self): - "Method defined in A, B and D." - def ACD_method(self): - "Method defined in A, C and D." - def ABCD_method(self): - "Method defined in A, B, C and D." - def BD_method(self): - "Method defined in B and D." - def BCD_method(self): - "Method defined in B, C and D." - def CD_method(self): - "Method defined in C and D." - def D_method(self): - "Method defined in D." - -class FunkyProperties(object): - """From SF bug 472347, by Roeland Rengelink. - - Property getters etc may not be vanilla functions or methods, - and this used to make GUI pydoc blow up. - """ - - def __init__(self): - self.desc = {'x':0} - - class get_desc: - def __init__(self, attr): - self.attr = attr - def __call__(self, inst): - print('Get called', self, inst) - return inst.desc[self.attr] - class set_desc: - def __init__(self, attr): - self.attr = attr - def __call__(self, inst, val): - print('Set called', self, inst, val) - inst.desc[self.attr] = val - class del_desc: - def __init__(self, attr): - self.attr = attr - def __call__(self, inst): - print('Del called', self, inst) - del inst.desc[self.attr] - - x = property(get_desc('x'), set_desc('x'), del_desc('x'), 'prop x') - - -submodule = types.ModuleType(__name__ + '.submodule', - """A submodule, which should appear in its parent's summary""") diff -Nru python3.11-3.11.8/Lib/test/sortperf.py python3.11-3.11.9/Lib/test/sortperf.py --- python3.11-3.11.8/Lib/test/sortperf.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/sortperf.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,169 +0,0 @@ -"""Sort performance test. - -See main() for command line syntax. -See tabulate() for output format. - -""" - -import sys -import time -import random -import marshal -import tempfile -import os - -td = tempfile.gettempdir() - -def randfloats(n): - """Return a list of n random floats in [0, 1).""" - # Generating floats is expensive, so this writes them out to a file in - # a temp directory. If the file already exists, it just reads them - # back in and shuffles them a bit. - fn = os.path.join(td, "rr%06d" % n) - try: - fp = open(fn, "rb") - except OSError: - r = random.random - result = [r() for i in range(n)] - try: - try: - fp = open(fn, "wb") - marshal.dump(result, fp) - fp.close() - fp = None - finally: - if fp: - try: - os.unlink(fn) - except OSError: - pass - except OSError as msg: - print("can't write", fn, ":", msg) - else: - result = marshal.load(fp) - fp.close() - # Shuffle it a bit... - for i in range(10): - i = random.randrange(n) - temp = result[:i] - del result[:i] - temp.reverse() - result.extend(temp) - del temp - assert len(result) == n - return result - -def flush(): - sys.stdout.flush() - -def doit(L): - t0 = time.perf_counter() - L.sort() - t1 = time.perf_counter() - print("%6.2f" % (t1-t0), end=' ') - flush() - -def tabulate(r): - r"""Tabulate sort speed for lists of various sizes. 
- - The sizes are 2**i for i in r (the argument, a list). - - The output displays i, 2**i, and the time to sort arrays of 2**i - floating point numbers with the following properties: - - *sort: random data - \sort: descending data - /sort: ascending data - 3sort: ascending, then 3 random exchanges - +sort: ascending, then 10 random at the end - %sort: ascending, then randomly replace 1% of the elements w/ random values - ~sort: many duplicates - =sort: all equal - !sort: worst case scenario - - """ - cases = tuple([ch + "sort" for ch in r"*\/3+%~=!"]) - fmt = ("%2s %7s" + " %6s"*len(cases)) - print(fmt % (("i", "2**i") + cases)) - for i in r: - n = 1 << i - L = randfloats(n) - print("%2d %7d" % (i, n), end=' ') - flush() - doit(L) # *sort - L.reverse() - doit(L) # \sort - doit(L) # /sort - - # Do 3 random exchanges. - for dummy in range(3): - i1 = random.randrange(n) - i2 = random.randrange(n) - L[i1], L[i2] = L[i2], L[i1] - doit(L) # 3sort - - # Replace the last 10 with random floats. - if n >= 10: - L[-10:] = [random.random() for dummy in range(10)] - doit(L) # +sort - - # Replace 1% of the elements at random. - for dummy in range(n // 100): - L[random.randrange(n)] = random.random() - doit(L) # %sort - - # Arrange for lots of duplicates. - if n > 4: - del L[4:] - L = L * (n // 4) - # Force the elements to be distinct objects, else timings can be - # artificially low. - L = list(map(lambda x: --x, L)) - doit(L) # ~sort - del L - - # All equal. Again, force the elements to be distinct objects. - L = list(map(abs, [-0.5] * n)) - doit(L) # =sort - del L - - # This one looks like [3, 2, 1, 0, 0, 1, 2, 3]. It was a bad case - # for an older implementation of quicksort, which used the median - # of the first, last and middle elements as the pivot. - half = n // 2 - L = list(range(half - 1, -1, -1)) - L.extend(range(half)) - # Force to float, so that the timings are comparable. This is - # significantly faster if we leave them as ints. - L = list(map(float, L)) - doit(L) # !sort - print() - -def main(): - """Main program when invoked as a script. - - One argument: tabulate a single row. - Two arguments: tabulate a range (inclusive). - Extra arguments are used to seed the random generator. - - """ - # default range (inclusive) - k1 = 15 - k2 = 20 - if sys.argv[1:]: - # one argument: single point - k1 = k2 = int(sys.argv[1]) - if sys.argv[2:]: - # two arguments: specify range - k2 = int(sys.argv[2]) - if sys.argv[3:]: - # derive random seed from remaining arguments - x = 1 - for a in sys.argv[3:]: - x = 69069 * x + hash(a) - random.seed(x) - r = range(k1, k2+1) # include the end point - tabulate(r) - -if __name__ == '__main__': - main() diff -Nru python3.11-3.11.8/Lib/test/support/__init__.py python3.11-3.11.9/Lib/test/support/__init__.py --- python3.11-3.11.8/Lib/test/support/__init__.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/support/__init__.py 2024-04-02 08:25:04.000000000 +0000 @@ -56,6 +56,7 @@ "run_with_tz", "PGO", "missing_compiler_executable", "ALWAYS_EQ", "NEVER_EQ", "LARGEST", "SMALLEST", "LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT", + "skip_on_s390x", ] @@ -2238,3 +2239,7 @@ 'build', } return ignored + +#Windows doesn't have os.uname() but it doesn't support s390x. 
+skip_on_s390x = unittest.skipIf(hasattr(os, 'uname') and os.uname().machine == 's390x', + 'skipped on s390x') diff -Nru python3.11-3.11.8/Lib/test/support/import_helper.py python3.11-3.11.9/Lib/test/support/import_helper.py --- python3.11-3.11.8/Lib/test/support/import_helper.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/support/import_helper.py 2024-04-02 08:25:04.000000000 +0000 @@ -249,6 +249,26 @@ @contextlib.contextmanager +def isolated_modules(): + """ + Save modules on entry and cleanup on exit. + """ + (saved,) = modules_setup() + try: + yield + finally: + modules_cleanup(saved) + + +def mock_register_at_fork(func): + # bpo-30599: Mock os.register_at_fork() when importing the random module, + # since this function doesn't allow to unregister callbacks and would leak + # memory. + from unittest import mock + return mock.patch('os.register_at_fork', create=True)(func) + + +@contextlib.contextmanager def ready_to_import(name=None, source=""): from test.support import script_helper diff -Nru python3.11-3.11.8/Lib/test/test_argparse.py python3.11-3.11.9/Lib/test/test_argparse.py --- python3.11-3.11.8/Lib/test/test_argparse.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_argparse.py 2024-04-02 08:25:04.000000000 +0000 @@ -2156,6 +2156,34 @@ (NS(foo=False, bar=0.5, w=7, x='b'), ['-W', '-X', 'Y', 'Z']), ) + def test_parse_known_args_with_single_dash_option(self): + parser = ErrorRaisingArgumentParser() + parser.add_argument('-k', '--known', action='count', default=0) + parser.add_argument('-n', '--new', action='count', default=0) + self.assertEqual(parser.parse_known_args(['-k', '-u']), + (NS(known=1, new=0), ['-u'])) + self.assertEqual(parser.parse_known_args(['-u', '-k']), + (NS(known=1, new=0), ['-u'])) + self.assertEqual(parser.parse_known_args(['-ku']), + (NS(known=1, new=0), ['-u'])) + self.assertArgumentParserError(parser.parse_known_args, ['-k=u']) + self.assertEqual(parser.parse_known_args(['-uk']), + (NS(known=0, new=0), ['-uk'])) + self.assertEqual(parser.parse_known_args(['-u=k']), + (NS(known=0, new=0), ['-u=k'])) + self.assertEqual(parser.parse_known_args(['-kunknown']), + (NS(known=1, new=0), ['-unknown'])) + self.assertArgumentParserError(parser.parse_known_args, ['-k=unknown']) + self.assertEqual(parser.parse_known_args(['-ku=nknown']), + (NS(known=1, new=0), ['-u=nknown'])) + self.assertEqual(parser.parse_known_args(['-knew']), + (NS(known=1, new=1), ['-ew'])) + self.assertArgumentParserError(parser.parse_known_args, ['-kn=ew']) + self.assertArgumentParserError(parser.parse_known_args, ['-k-new']) + self.assertArgumentParserError(parser.parse_known_args, ['-kn-ew']) + self.assertEqual(parser.parse_known_args(['-kne-w']), + (NS(known=1, new=1), ['-e-w'])) + def test_dest(self): parser = ErrorRaisingArgumentParser() parser.add_argument('--foo', action='store_true') @@ -2715,6 +2743,27 @@ ''' self.assertEqual(parser.format_help(), textwrap.dedent(expected)) + def test_help_subparser_all_mutually_exclusive_group_members_suppressed(self): + self.maxDiff = None + parser = ErrorRaisingArgumentParser(prog='PROG') + commands = parser.add_subparsers(title="commands", dest="command") + cmd_foo = commands.add_parser("foo") + group = cmd_foo.add_mutually_exclusive_group() + group.add_argument('--verbose', action='store_true', help=argparse.SUPPRESS) + group.add_argument('--quiet', action='store_true', help=argparse.SUPPRESS) + longopt = '--' + 'long'*32 + longmeta = 'LONG'*32 + cmd_foo.add_argument(longopt) + expected = f'''\ + 
usage: PROG foo [-h] + [{longopt} {longmeta}] + + options: + -h, --help show this help message and exit + {longopt} {longmeta} + ''' + self.assertEqual(cmd_foo.format_help(), textwrap.dedent(expected)) + def test_empty_group(self): # See issue 26952 parser = argparse.ArgumentParser() diff -Nru python3.11-3.11.8/Lib/test/test_ast.py python3.11-3.11.9/Lib/test/test_ast.py --- python3.11-3.11.8/Lib/test/test_ast.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_ast.py 2024-04-02 08:25:04.000000000 +0000 @@ -772,14 +772,6 @@ with self.assertRaises(SyntaxError): ast.parse('lambda x=1, /: ...', feature_version=(3, 7)) - def test_parenthesized_with_feature_version(self): - ast.parse('with (CtxManager() as example): ...', feature_version=(3, 10)) - # While advertised as a feature in Python 3.10, this was allowed starting 3.9 - ast.parse('with (CtxManager() as example): ...', feature_version=(3, 9)) - with self.assertRaises(SyntaxError): - ast.parse('with (CtxManager() as example): ...', feature_version=(3, 8)) - ast.parse('with CtxManager() as example: ...', feature_version=(3, 8)) - def test_debug_f_string_feature_version(self): ast.parse('f"{x=}"', feature_version=(3, 8)) with self.assertRaises(SyntaxError): @@ -790,6 +782,10 @@ with self.assertRaises(SyntaxError): ast.parse('(x := 0)', feature_version=(3, 7)) + def test_conditional_context_managers_parse_with_low_feature_version(self): + # regression test for gh-115881 + ast.parse('with (x() if y else z()): ...', feature_version=(3, 8)) + def test_exception_groups_feature_version(self): code = dedent(''' try: ... diff -Nru python3.11-3.11.8/Lib/test/test_asyncio/test_events.py python3.11-3.11.9/Lib/test/test_asyncio/test_events.py --- python3.11-3.11.8/Lib/test/test_asyncio/test_events.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_asyncio/test_events.py 2024-04-02 08:25:04.000000000 +0000 @@ -1352,6 +1352,80 @@ tr.close() self.loop.run_until_complete(pr.done) + def test_datagram_send_to_non_listening_address(self): + # see: + # https://github.com/python/cpython/issues/91227 + # https://github.com/python/cpython/issues/88906 + # https://bugs.python.org/issue47071 + # https://bugs.python.org/issue44743 + # The Proactor event loop would fail to receive datagram messages after + # sending a message to an address that wasn't listening. 
+ loop = self.loop + + class Protocol(asyncio.DatagramProtocol): + + _received_datagram = None + + def datagram_received(self, data, addr): + self._received_datagram.set_result(data) + + async def wait_for_datagram_received(self): + self._received_datagram = loop.create_future() + result = await asyncio.wait_for(self._received_datagram, 10) + self._received_datagram = None + return result + + def create_socket(): + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(False) + sock.bind(('127.0.0.1', 0)) + return sock + + socket_1 = create_socket() + transport_1, protocol_1 = loop.run_until_complete( + loop.create_datagram_endpoint(Protocol, sock=socket_1) + ) + addr_1 = socket_1.getsockname() + + socket_2 = create_socket() + transport_2, protocol_2 = loop.run_until_complete( + loop.create_datagram_endpoint(Protocol, sock=socket_2) + ) + addr_2 = socket_2.getsockname() + + # creating and immediately closing this to try to get an address that + # is not listening + socket_3 = create_socket() + transport_3, protocol_3 = loop.run_until_complete( + loop.create_datagram_endpoint(Protocol, sock=socket_3) + ) + addr_3 = socket_3.getsockname() + transport_3.abort() + + transport_1.sendto(b'a', addr=addr_2) + self.assertEqual(loop.run_until_complete( + protocol_2.wait_for_datagram_received() + ), b'a') + + transport_2.sendto(b'b', addr=addr_1) + self.assertEqual(loop.run_until_complete( + protocol_1.wait_for_datagram_received() + ), b'b') + + # this should send to an address that isn't listening + transport_1.sendto(b'c', addr=addr_3) + loop.run_until_complete(asyncio.sleep(0)) + + # transport 1 should still be able to receive messages after sending to + # an address that wasn't listening + transport_2.sendto(b'd', addr=addr_1) + self.assertEqual(loop.run_until_complete( + protocol_1.wait_for_datagram_received() + ), b'd') + + transport_1.close() + transport_2.close() + def test_internal_fds(self): loop = self.create_event_loop() if not isinstance(loop, selector_events.BaseSelectorEventLoop): diff -Nru python3.11-3.11.8/Lib/test/test_asyncio/test_sock_lowlevel.py python3.11-3.11.9/Lib/test/test_asyncio/test_sock_lowlevel.py --- python3.11-3.11.8/Lib/test/test_asyncio/test_sock_lowlevel.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_asyncio/test_sock_lowlevel.py 2024-04-02 08:25:04.000000000 +0000 @@ -555,12 +555,93 @@ def create_event_loop(self): return asyncio.SelectorEventLoop() + class ProactorEventLoopTests(BaseSockTestsMixin, test_utils.TestCase): def create_event_loop(self): return asyncio.ProactorEventLoop() + + async def _basetest_datagram_send_to_non_listening_address(self, + recvfrom): + # see: + # https://github.com/python/cpython/issues/91227 + # https://github.com/python/cpython/issues/88906 + # https://bugs.python.org/issue47071 + # https://bugs.python.org/issue44743 + # The Proactor event loop would fail to receive datagram messages + # after sending a message to an address that wasn't listening. 
+ + def create_socket(): + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(False) + sock.bind(('127.0.0.1', 0)) + return sock + + socket_1 = create_socket() + addr_1 = socket_1.getsockname() + + socket_2 = create_socket() + addr_2 = socket_2.getsockname() + + # creating and immediately closing this to try to get an address + # that is not listening + socket_3 = create_socket() + addr_3 = socket_3.getsockname() + socket_3.shutdown(socket.SHUT_RDWR) + socket_3.close() + + socket_1_recv_task = self.loop.create_task(recvfrom(socket_1)) + socket_2_recv_task = self.loop.create_task(recvfrom(socket_2)) + await asyncio.sleep(0) + + await self.loop.sock_sendto(socket_1, b'a', addr_2) + self.assertEqual(await socket_2_recv_task, b'a') + + await self.loop.sock_sendto(socket_2, b'b', addr_1) + self.assertEqual(await socket_1_recv_task, b'b') + socket_1_recv_task = self.loop.create_task(recvfrom(socket_1)) + await asyncio.sleep(0) + + # this should send to an address that isn't listening + await self.loop.sock_sendto(socket_1, b'c', addr_3) + self.assertEqual(await socket_1_recv_task, b'') + socket_1_recv_task = self.loop.create_task(recvfrom(socket_1)) + await asyncio.sleep(0) + + # socket 1 should still be able to receive messages after sending + # to an address that wasn't listening + socket_2.sendto(b'd', addr_1) + self.assertEqual(await socket_1_recv_task, b'd') + + socket_1.shutdown(socket.SHUT_RDWR) + socket_1.close() + socket_2.shutdown(socket.SHUT_RDWR) + socket_2.close() + + + def test_datagram_send_to_non_listening_address_recvfrom(self): + async def recvfrom(socket): + data, _ = await self.loop.sock_recvfrom(socket, 4096) + return data + + self.loop.run_until_complete( + self._basetest_datagram_send_to_non_listening_address( + recvfrom)) + + + def test_datagram_send_to_non_listening_address_recvfrom_into(self): + async def recvfrom_into(socket): + buf = bytearray(4096) + length, _ = await self.loop.sock_recvfrom_into(socket, buf, + 4096) + return buf[:length] + + self.loop.run_until_complete( + self._basetest_datagram_send_to_non_listening_address( + recvfrom_into)) + else: import selectors diff -Nru python3.11-3.11.8/Lib/test/test_asyncio/test_windows_events.py python3.11-3.11.9/Lib/test/test_asyncio/test_windows_events.py --- python3.11-3.11.8/Lib/test/test_asyncio/test_windows_events.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_asyncio/test_windows_events.py 2024-04-02 08:25:04.000000000 +0000 @@ -36,7 +36,23 @@ self.trans.close() -class ProactorLoopCtrlC(test_utils.TestCase): +class WindowsEventsTestCase(test_utils.TestCase): + def _unraisablehook(self, unraisable): + # Storing unraisable.object can resurrect an object which is being + # finalized. Storing unraisable.exc_value creates a reference cycle. 
+ self._unraisable = unraisable + print(unraisable) + + def setUp(self): + self._prev_unraisablehook = sys.unraisablehook + self._unraisable = None + sys.unraisablehook = self._unraisablehook + + def tearDown(self): + sys.unraisablehook = self._prev_unraisablehook + self.assertIsNone(self._unraisable) + +class ProactorLoopCtrlC(WindowsEventsTestCase): def test_ctrl_c(self): @@ -58,7 +74,7 @@ thread.join() -class ProactorMultithreading(test_utils.TestCase): +class ProactorMultithreading(WindowsEventsTestCase): def test_run_from_nonmain_thread(self): finished = False @@ -79,7 +95,7 @@ self.assertTrue(finished) -class ProactorTests(test_utils.TestCase): +class ProactorTests(WindowsEventsTestCase): def setUp(self): super().setUp() @@ -283,8 +299,32 @@ return "done" + def test_loop_restart(self): + # We're fishing for the "RuntimeError: <_overlapped.Overlapped object at XXX> + # still has pending operation at deallocation, the process may crash" error + stop = threading.Event() + def threadMain(): + while not stop.is_set(): + self.loop.call_soon_threadsafe(lambda: None) + time.sleep(0.01) + thr = threading.Thread(target=threadMain) + + # In 10 60-second runs of this test prior to the fix: + # time in seconds until failure: (none), 15.0, 6.4, (none), 7.6, 8.3, 1.7, 22.2, 23.5, 8.3 + # 10 seconds had a 50% failure rate but longer would be more costly + end_time = time.time() + 10 # Run for 10 seconds + self.loop.call_soon(thr.start) + while not self._unraisable: # Stop if we got an unraisable exc + self.loop.stop() + self.loop.run_forever() + if time.time() >= end_time: + break + + stop.set() + thr.join() + -class WinPolicyTests(test_utils.TestCase): +class WinPolicyTests(WindowsEventsTestCase): def test_selector_win_policy(self): async def main(): diff -Nru python3.11-3.11.8/Lib/test/test_baseexception.py python3.11-3.11.9/Lib/test/test_baseexception.py --- python3.11-3.11.8/Lib/test/test_baseexception.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_baseexception.py 2024-04-02 08:25:04.000000000 +0000 @@ -129,7 +129,7 @@ d[HashThisKeyWillClearTheDict()] = Value() # refcount of Value() is 1 now - # Exception.__setstate__ should aquire a strong reference of key and + # Exception.__setstate__ should acquire a strong reference of key and # value in the dict. Otherwise, Value()'s refcount would go below # zero in the tp_hash call in PyObject_SetAttr(), and it would cause # crash in GC. 
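The WindowsEventsTestCase changes above install a sys.unraisablehook in setUp() and have tearDown() assert that no unraisable exception was recorded while the test ran. What follows is a minimal, self-contained sketch of that general hook pattern, not part of the 3.11.9 diff and independent of asyncio; it relies on CPython's reference counting to trigger finalization promptly, and the names seen, prev_hook and Noisy are illustrative only.

# Sketch of the sys.unraisablehook capture/restore pattern (illustrative names).
import sys

seen = []                       # collected UnraisableHookArgs records
prev_hook = sys.unraisablehook  # remember the previous hook so it can be restored

# The hook receives a single UnraisableHookArgs argument carrying exc_type,
# exc_value, exc_traceback, err_msg and object attributes.
sys.unraisablehook = seen.append

class Noisy:
    def __del__(self):
        # Exceptions raised during finalization cannot propagate normally;
        # they are reported through sys.unraisablehook instead.
        raise RuntimeError("boom")

try:
    Noisy()  # under CPython, the temporary is dropped immediately, running __del__
    assert seen, "unraisable exception was not reported"
    assert isinstance(seen[-1].exc_value, RuntimeError)
finally:
    sys.unraisablehook = prev_hook  # always restore the original hook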
diff -Nru python3.11-3.11.8/Lib/test/test_builtin.py python3.11-3.11.9/Lib/test/test_builtin.py --- python3.11-3.11.8/Lib/test/test_builtin.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_builtin.py 2024-04-02 08:25:04.000000000 +0000 @@ -577,6 +577,14 @@ self.assertIsInstance(res, list) self.assertTrue(res == ["a", "b", "c"]) + # dir(obj__dir__iterable) + class Foo(object): + def __dir__(self): + return {"b", "c", "a"} + res = dir(Foo()) + self.assertIsInstance(res, list) + self.assertEqual(sorted(res), ["a", "b", "c"]) + # dir(obj__dir__not_sequence) class Foo(object): def __dir__(self): diff -Nru python3.11-3.11.8/Lib/test/test_bz2.py python3.11-3.11.9/Lib/test/test_bz2.py --- python3.11-3.11.8/Lib/test/test_bz2.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_bz2.py 2024-04-02 08:25:04.000000000 +0000 @@ -3,19 +3,19 @@ import array import unittest +import io from io import BytesIO, DEFAULT_BUFFER_SIZE import os import pickle import glob import tempfile -import pathlib import random import shutil import subprocess import threading from test.support import import_helper from test.support import threading_helper -from test.support.os_helper import unlink +from test.support.os_helper import unlink, FakePath import _compression import sys @@ -537,12 +537,136 @@ with BZ2File(self.filename) as bz2f: self.assertEqual(bz2f.read(), data1 + data2) + def testOpenFilename(self): + with BZ2File(self.filename, "wb") as f: + f.write(b'content') + self.assertIsInstance(f.fileno(), int) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), False) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + with BZ2File(self.filename, "ab") as f: + f.write(b'appendix') + self.assertIsInstance(f.fileno(), int) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), False) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + with BZ2File(self.filename, 'rb') as f: + self.assertEqual(f.read(), b'contentappendix') + self.assertIsInstance(f.fileno(), int) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + with self.assertRaises(ValueError): + f.fileno() + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + def testOpenFileWithName(self): + with open(self.filename, 'wb') as raw: + with BZ2File(raw, 'wb') as f: + f.write(b'content') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), False) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + with open(self.filename, 'ab') as raw: + with BZ2File(raw, 'ab') as f: + f.write(b'appendix') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertIs(f.readable(), False) + 
self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), False) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + with open(self.filename, 'rb') as raw: + with BZ2File(raw, 'rb') as f: + self.assertEqual(f.read(), b'contentappendix') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + with self.assertRaises(ValueError): + f.fileno() + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + def testOpenFileWithoutName(self): + bio = BytesIO() + with BZ2File(bio, 'wb') as f: + f.write(b'content') + self.assertRaises(io.UnsupportedOperation, f.fileno) + self.assertRaises(ValueError, f.fileno) + + with BZ2File(bio, 'ab') as f: + f.write(b'appendix') + self.assertRaises(io.UnsupportedOperation, f.fileno) + self.assertRaises(ValueError, f.fileno) + + bio.seek(0) + with BZ2File(bio, 'rb') as f: + self.assertEqual(f.read(), b'contentappendix') + self.assertRaises(io.UnsupportedOperation, f.fileno) + with self.assertRaises(ValueError): + f.fileno() + + def testOpenFileWithIntName(self): + fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) + with open(fd, 'wb') as raw: + with BZ2File(raw, 'wb') as f: + f.write(b'content') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertRaises(ValueError, f.fileno) + + fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT | os.O_APPEND) + with open(fd, 'ab') as raw: + with BZ2File(raw, 'ab') as f: + f.write(b'appendix') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertRaises(ValueError, f.fileno) + + fd = os.open(self.filename, os.O_RDONLY) + with open(fd, 'rb') as raw: + with BZ2File(raw, 'rb') as f: + self.assertEqual(f.read(), b'contentappendix') + self.assertEqual(f.fileno(), raw.fileno()) + with self.assertRaises(ValueError): + f.fileno() + def testOpenBytesFilename(self): str_filename = self.filename - try: - bytes_filename = str_filename.encode("ascii") - except UnicodeEncodeError: - self.skipTest("Temporary file name needs to be ASCII") + bytes_filename = os.fsencode(str_filename) with BZ2File(bytes_filename, "wb") as f: f.write(self.DATA) with BZ2File(bytes_filename, "rb") as f: @@ -552,7 +676,7 @@ self.assertEqual(f.read(), self.DATA) def testOpenPathLikeFilename(self): - filename = pathlib.Path(self.filename) + filename = FakePath(self.filename) with BZ2File(filename, "wb") as f: f.write(self.DATA) with BZ2File(filename, "rb") as f: diff -Nru python3.11-3.11.8/Lib/test/test_capi/test_structmembers.py python3.11-3.11.9/Lib/test/test_capi/test_structmembers.py --- python3.11-3.11.8/Lib/test/test_capi/test_structmembers.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_capi/test_structmembers.py 2024-04-02 08:25:04.000000000 +0000 @@ -69,36 +69,22 @@ self._test_warn(name, maxval+1, minval) self._test_warn(name, hardmaxval) - if indexlimit is None: - indexlimit = hardlimit - if not indexlimit: + if indexlimit is False: self.assertRaises(TypeError, setattr, ts, name, Index(minval)) self.assertRaises(TypeError, setattr, ts, name, Index(maxval)) else: - hardminindexval, hardmaxindexval = indexlimit self._test_write(name, Index(minval), minval) - if minval < 
hardminindexval: - self._test_write(name, Index(hardminindexval), hardminindexval) - if maxval < hardmaxindexval: - self._test_write(name, Index(maxval), maxval) - else: - self._test_write(name, Index(hardmaxindexval), hardmaxindexval) - self._test_overflow(name, Index(hardminindexval-1)) - if name in ('T_UINT', 'T_ULONG'): - self.assertRaises(TypeError, setattr, ts, name, - Index(hardmaxindexval+1)) - self.assertRaises(TypeError, setattr, ts, name, - Index(2**1000)) - else: - self._test_overflow(name, Index(hardmaxindexval+1)) - self._test_overflow(name, Index(2**1000)) + self._test_write(name, Index(maxval), maxval) + self._test_overflow(name, Index(hardminval-1)) + self._test_overflow(name, Index(hardmaxval+1)) + self._test_overflow(name, Index(2**1000)) self._test_overflow(name, Index(-2**1000)) - if hardminindexval < minval and name != 'T_ULONGLONG': - self._test_warn(name, Index(hardminindexval)) - self._test_warn(name, Index(minval-1)) - if maxval < hardmaxindexval: - self._test_warn(name, Index(maxval+1)) - self._test_warn(name, Index(hardmaxindexval)) + if hardminval < minval: + self._test_warn(name, Index(hardminval)) + self._test_warn(name, Index(minval-1), maxval) + if maxval < hardmaxval: + self._test_warn(name, Index(maxval+1), minval) + self._test_warn(name, Index(hardmaxval)) def test_bool(self): ts.T_BOOL = True @@ -125,14 +111,12 @@ self._test_int_range('T_INT', INT_MIN, INT_MAX, hardlimit=(LONG_MIN, LONG_MAX)) self._test_int_range('T_UINT', 0, UINT_MAX, - hardlimit=(LONG_MIN, ULONG_MAX), - indexlimit=(LONG_MIN, LONG_MAX)) + hardlimit=(LONG_MIN, ULONG_MAX)) def test_long(self): self._test_int_range('T_LONG', LONG_MIN, LONG_MAX) self._test_int_range('T_ULONG', 0, ULONG_MAX, - hardlimit=(LONG_MIN, ULONG_MAX), - indexlimit=(LONG_MIN, LONG_MAX)) + hardlimit=(LONG_MIN, ULONG_MAX)) def test_py_ssize_t(self): self._test_int_range('T_PYSSIZET', PY_SSIZE_T_MIN, PY_SSIZE_T_MAX, indexlimit=False) @@ -141,7 +125,7 @@ def test_longlong(self): self._test_int_range('T_LONGLONG', LLONG_MIN, LLONG_MAX) self._test_int_range('T_ULONGLONG', 0, ULLONG_MAX, - indexlimit=(LONG_MIN, LONG_MAX)) + hardlimit=(LONG_MIN, ULLONG_MAX)) def test_bad_assignments(self): integer_attributes = [ diff -Nru python3.11-3.11.8/Lib/test/test_clinic.py python3.11-3.11.9/Lib/test/test_clinic.py --- python3.11-3.11.8/Lib/test/test_clinic.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_clinic.py 2024-04-02 08:25:04.000000000 +0000 @@ -22,6 +22,20 @@ from clinic import DSLParser +def restore_dict(converters, old_converters): + converters.clear() + converters.update(old_converters) + + +def save_restore_converters(testcase): + testcase.addCleanup(restore_dict, clinic.converters, + clinic.converters.copy()) + testcase.addCleanup(restore_dict, clinic.legacy_converters, + clinic.legacy_converters.copy()) + testcase.addCleanup(restore_dict, clinic.return_converters, + clinic.return_converters.copy()) + + class _ParserBase(TestCase): maxDiff = None @@ -108,6 +122,7 @@ class ClinicWholeFileTest(_ParserBase): def setUp(self): + save_restore_converters(self) self.clinic = clinic.Clinic(clinic.CLanguage(None), filename="test.c") def expect_failure(self, raw): @@ -1317,6 +1332,9 @@ maxDiff = None clinic_py = os.path.join(test_tools.toolsdir, "clinic", "clinic.py") + def setUp(self): + save_restore_converters(self) + def _do_test(self, *args, expect_success=True): with subprocess.Popen( [sys.executable, "-Xutf8", self.clinic_py, *args], diff -Nru python3.11-3.11.8/Lib/test/test_cmd_line.py 
python3.11-3.11.9/Lib/test/test_cmd_line.py --- python3.11-3.11.8/Lib/test/test_cmd_line.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_cmd_line.py 2024-04-02 08:25:04.000000000 +0000 @@ -41,6 +41,7 @@ self.assertNotIn(b'Traceback', err) return out + @support.cpython_only def test_help(self): self.verify_valid_flag('-h') self.verify_valid_flag('-?') @@ -51,14 +52,17 @@ self.assertNotIn(b'-X dev', out) self.assertLess(len(lines), 50) + @support.cpython_only def test_help_env(self): out = self.verify_valid_flag('--help-env') self.assertIn(b'PYTHONHOME', out) + @support.cpython_only def test_help_xoptions(self): out = self.verify_valid_flag('--help-xoptions') self.assertIn(b'-X dev', out) + @support.cpython_only def test_help_all(self): out = self.verify_valid_flag('--help-all') lines = out.splitlines() @@ -77,6 +81,7 @@ def test_site_flag(self): self.verify_valid_flag('-S') + @support.cpython_only def test_version(self): version = ('Python %d.%d' % sys.version_info[:2]).encode("ascii") for switch in '-V', '--version', '-VV': @@ -142,6 +147,7 @@ else: self.assertEqual(err, b'') + @support.cpython_only def test_xoption_frozen_modules(self): tests = { ('=on', 'FrozenImporter'), @@ -570,6 +576,7 @@ print("del sys.modules['__main__']", file=script) assert_python_ok(filename) + @support.cpython_only def test_unknown_options(self): rc, out, err = assert_python_failure('-E', '-z') self.assertIn(b'Unknown option: -z', err) @@ -684,6 +691,7 @@ self.assertEqual(proc.returncode, 0, proc) return proc.stdout.rstrip() + @support.cpython_only def test_xdev(self): # sys.flags.dev_mode code = "import sys; print(sys.flags.dev_mode)" @@ -858,6 +866,7 @@ self.assertEqual(proc.returncode, 0, proc) self.assertEqual(proc.stdout.strip(), b'0') + @support.cpython_only def test_parsing_error(self): args = [sys.executable, '-I', '--unknown-option'] proc = subprocess.run(args, diff -Nru python3.11-3.11.8/Lib/test/test_compile.py python3.11-3.11.9/Lib/test/test_compile.py --- python3.11-3.11.8/Lib/test/test_compile.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_compile.py 2024-04-02 08:25:04.000000000 +0000 @@ -704,7 +704,7 @@ def f1(): "docstring" return 42 - self.assertEqual(f1.__code__.co_consts, ("docstring", 42)) + self.assertEqual(f1.__code__.co_consts, (f1.__doc__, 42)) # This is a regression test for a CPython specific peephole optimizer # implementation bug present in a few releases. It's assertion verifies @@ -935,6 +935,8 @@ for func in (no_code1, no_code2): with self.subTest(func=func): + if func is no_code1 and no_code1.__doc__ is None: + continue code = func.__code__ lines = list(code.co_lines()) start, end, line = lines[0] diff -Nru python3.11-3.11.8/Lib/test/test_concurrent_futures/test_shutdown.py python3.11-3.11.9/Lib/test/test_concurrent_futures/test_shutdown.py --- python3.11-3.11.8/Lib/test/test_concurrent_futures/test_shutdown.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_concurrent_futures/test_shutdown.py 2024-04-02 08:25:04.000000000 +0000 @@ -247,7 +247,9 @@ # Errors in atexit hooks don't change the process exit code, check # stderr manually. self.assertFalse(err) - self.assertEqual(out.strip(), b"apple") + # gh-116682: stdout may be empty if shutdown happens before task + # starts executing. 
+ self.assertIn(out.strip(), [b"apple", b""]) class ProcessPoolShutdownTest(ExecutorShutdownTest): diff -Nru python3.11-3.11.8/Lib/test/test_configparser.py python3.11-3.11.9/Lib/test/test_configparser.py --- python3.11-3.11.8/Lib/test/test_configparser.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_configparser.py 2024-04-02 08:25:04.000000000 +0000 @@ -647,6 +647,21 @@ "'opt' in section 'Bar' already exists") self.assertEqual(e.args, ("Bar", "opt", "", None)) + def test_get_after_duplicate_option_error(self): + cf = self.newconfig() + ini = textwrap.dedent("""\ + [Foo] + x{equals}1 + y{equals}2 + y{equals}3 + """.format(equals=self.delimiters[0])) + if self.strict: + with self.assertRaises(configparser.DuplicateOptionError): + cf.read_string(ini) + else: + cf.read_string(ini) + self.assertEqual(cf.get('Foo', 'x'), '1') + def test_write(self): config_string = ( "[Long Line]\n" diff -Nru python3.11-3.11.8/Lib/test/test_copy.py python3.11-3.11.9/Lib/test/test_copy.py --- python3.11-3.11.8/Lib/test/test_copy.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_copy.py 2024-04-02 08:25:04.000000000 +0000 @@ -91,9 +91,7 @@ # Type-specific _copy_xxx() methods def test_copy_atomic(self): - class Classic: - pass - class NewStyle(object): + class NewStyle: pass def f(): pass @@ -103,7 +101,7 @@ 42, 2**100, 3.14, True, False, 1j, "hello", "hello\u1234", f.__code__, b"world", bytes(range(256)), range(10), slice(1, 10, 2), - NewStyle, Classic, max, WithMetaclass, property()] + NewStyle, max, WithMetaclass, property()] for x in tests: self.assertIs(copy.copy(x), x) @@ -356,15 +354,13 @@ # Type-specific _deepcopy_xxx() methods def test_deepcopy_atomic(self): - class Classic: - pass - class NewStyle(object): + class NewStyle: pass def f(): pass tests = [None, ..., NotImplemented, 42, 2**100, 3.14, True, False, 1j, b"bytes", "hello", "hello\u1234", f.__code__, - NewStyle, range(10), Classic, max, property()] + NewStyle, range(10), max, property()] for x in tests: self.assertIs(copy.deepcopy(x), x) diff -Nru python3.11-3.11.8/Lib/test/test_csv.py python3.11-3.11.9/Lib/test/test_csv.py --- python3.11-3.11.8/Lib/test/test_csv.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_csv.py 2024-04-02 08:25:04.000000000 +0000 @@ -46,6 +46,20 @@ quoting=csv.QUOTE_ALL, quotechar=None) self.assertRaises(TypeError, ctor, arg, quoting=csv.QUOTE_NONE, quotechar='') + ctor(arg, delimiter=' ') + ctor(arg, escapechar=' ') + ctor(arg, quotechar=' ') + ctor(arg, delimiter='\t', skipinitialspace=True) + ctor(arg, escapechar='\t', skipinitialspace=True) + ctor(arg, quotechar='\t', skipinitialspace=True) + ctor(arg, delimiter=' ', skipinitialspace=True) + ctor(arg, delimiter='^') + ctor(arg, escapechar='^') + ctor(arg, quotechar='^') + ctor(arg, delimiter='\x85') + ctor(arg, escapechar='\x85') + ctor(arg, quotechar='\x85') + ctor(arg, lineterminator='\x85') def test_reader_arg_valid(self): self._test_arg_valid(csv.reader, []) @@ -152,9 +166,6 @@ def test_write_arg_valid(self): self._write_error_test(csv.Error, None) - self._write_test((), '') - self._write_test([None], '""') - self._write_error_test(csv.Error, [None], quoting = csv.QUOTE_NONE) # Check that exceptions are passed up the chain self._write_error_test(OSError, BadIterable()) class BadList: @@ -226,9 +237,11 @@ writer = csv.writer(sio, lineterminator=lineterminator) writer.writerow(['a', 'b']) writer.writerow([1, 2]) + writer.writerow(['\r', '\n']) self.assertEqual(sio.getvalue(), 
f'a,b{lineterminator}' - f'1,2{lineterminator}') + f'1,2{lineterminator}' + f'"\r","\n"{lineterminator}') def test_write_iterable(self): self._write_test(iter(['a', 1, 'p,q']), 'a,1,"p,q"') @@ -271,6 +284,38 @@ fileobj.seek(0) self.assertEqual(fileobj.read(), 'a\r\n""\r\n') + def test_write_empty_fields(self): + self._write_test((), '') + self._write_test([''], '""') + self._write_error_test(csv.Error, [''], quoting=csv.QUOTE_NONE) + self._write_test([None], '""') + self._write_error_test(csv.Error, [None], quoting=csv.QUOTE_NONE) + self._write_test(['', ''], ',') + self._write_test([None, None], ',') + + def test_write_empty_fields_space_delimiter(self): + self._write_test([''], '""', delimiter=' ', skipinitialspace=False) + self._write_test([''], '""', delimiter=' ', skipinitialspace=True) + self._write_test([None], '""', delimiter=' ', skipinitialspace=False) + self._write_test([None], '""', delimiter=' ', skipinitialspace=True) + + self._write_test(['', ''], ' ', delimiter=' ', skipinitialspace=False) + self._write_test(['', ''], '"" ""', delimiter=' ', skipinitialspace=True) + self._write_test([None, None], ' ', delimiter=' ', skipinitialspace=False) + self._write_test([None, None], '"" ""', delimiter=' ', skipinitialspace=True) + + self._write_test(['', ''], ' ', delimiter=' ', skipinitialspace=False, + quoting=csv.QUOTE_NONE) + self._write_error_test(csv.Error, ['', ''], + delimiter=' ', skipinitialspace=True, + quoting=csv.QUOTE_NONE) + + self._write_test([None, None], ' ', delimiter=' ', skipinitialspace=False, + quoting=csv.QUOTE_NONE) + self._write_error_test(csv.Error, [None, None], + delimiter=' ', skipinitialspace=True, + quoting=csv.QUOTE_NONE) + def test_writerows_errors(self): with TemporaryFile("w+", encoding="utf-8", newline='') as fileobj: writer = csv.writer(fileobj) @@ -372,6 +417,14 @@ [['no space', 'space', 'spaces', '\ttab']], skipinitialspace=True) + def test_read_space_delimiter(self): + self._read_test(['a b', ' a ', ' ', ''], + [['a', '', '', 'b'], ['', '', 'a', '', ''], ['', '', ''], []], + delimiter=' ', skipinitialspace=False) + self._read_test(['a b', ' a ', ' ', ''], + [['a', 'b'], ['a', ''], [''], []], + delimiter=' ', skipinitialspace=True) + def test_read_bigfield(self): # This exercises the buffer realloc functionality and field size # limits. 
@@ -404,22 +457,44 @@ self.assertEqual(r.line_num, 3) def test_roundtrip_quoteed_newlines(self): - with TemporaryFile("w+", encoding="utf-8", newline='') as fileobj: - writer = csv.writer(fileobj) - rows = [['a\nb','b'],['c','x\r\nd']] - writer.writerows(rows) - fileobj.seek(0) - for i, row in enumerate(csv.reader(fileobj)): - self.assertEqual(row, rows[i]) + rows = [ + ['\na', 'b\nc', 'd\n'], + ['\re', 'f\rg', 'h\r'], + ['\r\ni', 'j\r\nk', 'l\r\n'], + ['\n\rm', 'n\n\ro', 'p\n\r'], + ['\r\rq', 'r\r\rs', 't\r\r'], + ['\n\nu', 'v\n\nw', 'x\n\n'], + ] + for lineterminator in '\r\n', '\n', '\r': + with self.subTest(lineterminator=lineterminator): + with TemporaryFile("w+", encoding="utf-8", newline='') as fileobj: + writer = csv.writer(fileobj, lineterminator=lineterminator) + writer.writerows(rows) + fileobj.seek(0) + for i, row in enumerate(csv.reader(fileobj)): + self.assertEqual(row, rows[i]) def test_roundtrip_escaped_unquoted_newlines(self): - with TemporaryFile("w+", encoding="utf-8", newline='') as fileobj: - writer = csv.writer(fileobj,quoting=csv.QUOTE_NONE,escapechar="\\") - rows = [['a\nb','b'],['c','x\r\nd']] - writer.writerows(rows) - fileobj.seek(0) - for i, row in enumerate(csv.reader(fileobj,quoting=csv.QUOTE_NONE,escapechar="\\")): - self.assertEqual(row,rows[i]) + rows = [ + ['\na', 'b\nc', 'd\n'], + ['\re', 'f\rg', 'h\r'], + ['\r\ni', 'j\r\nk', 'l\r\n'], + ['\n\rm', 'n\n\ro', 'p\n\r'], + ['\r\rq', 'r\r\rs', 't\r\r'], + ['\n\nu', 'v\n\nw', 'x\n\n'], + ] + for lineterminator in '\r\n', '\n', '\r': + with self.subTest(lineterminator=lineterminator): + with TemporaryFile("w+", encoding="utf-8", newline='') as fileobj: + writer = csv.writer(fileobj, lineterminator=lineterminator, + quoting=csv.QUOTE_NONE, escapechar="\\") + writer.writerows(rows) + fileobj.seek(0) + for i, row in enumerate(csv.reader(fileobj, + quoting=csv.QUOTE_NONE, + escapechar="\\")): + self.assertEqual(row, rows[i]) + class TestDialectRegistry(unittest.TestCase): def test_registry_badargs(self): @@ -498,10 +573,10 @@ escapechar = "\\" with TemporaryFile("w+", encoding="utf-8") as fileobj: - fileobj.write("abc def\nc1ccccc1 benzene\n") + fileobj.write("abc def\nc1ccccc1 benzene\n") fileobj.seek(0) reader = csv.reader(fileobj, dialect=space()) - self.assertEqual(next(reader), ["abc", "def"]) + self.assertEqual(next(reader), ["abc", "", "", "def"]) self.assertEqual(next(reader), ["c1ccccc1", "benzene"]) def compare_dialect_123(self, expected, *writeargs, **kwwriteargs): diff -Nru python3.11-3.11.8/Lib/test/test_dataclasses/__init__.py python3.11-3.11.9/Lib/test/test_dataclasses/__init__.py --- python3.11-3.11.8/Lib/test/test_dataclasses/__init__.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_dataclasses/__init__.py 2024-04-02 08:25:04.000000000 +0000 @@ -22,6 +22,8 @@ import typing # Needed for the string "typing.ClassVar[int]" to work as an annotation. import dataclasses # Needed for the string "dataclasses.InitVar[int]" to work as an annotation. +from test import support + # Just any custom exception we can catch. class CustomError(Exception): pass @@ -2143,6 +2145,7 @@ # whitespace stripped. 
self.assertEqual(a.replace(' ', ''), b.replace(' ', '')) + @support.requires_docstrings def test_existing_docstring_not_overridden(self): @dataclass class C: @@ -3284,6 +3287,17 @@ self.assertIs(a.__weakref__, a_ref) + def test_dataclass_derived_weakref_slot(self): + class A: + pass + + @dataclass(slots=True, weakref_slot=True) + class B(A): + pass + + B() + + class TestDescriptors(unittest.TestCase): def test_set_name(self): # See bpo-33141. diff -Nru python3.11-3.11.8/Lib/test/test_decimal.py python3.11-3.11.9/Lib/test/test_decimal.py --- python3.11-3.11.8/Lib/test/test_decimal.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_decimal.py 2024-04-02 08:25:04.000000000 +0000 @@ -37,7 +37,8 @@ requires_legacy_unicode_capi, check_sanitizer) from test.support import (TestFailed, run_with_locale, cpython_only, - darwin_malloc_err_warning, is_emscripten) + darwin_malloc_err_warning, is_emscripten, + skip_on_s390x) from test.support.import_helper import import_fresh_module from test.support import threading_helper from test.support import warnings_helper @@ -1121,6 +1122,13 @@ ('z>z6.1f', '-0.', 'zzz0.0'), ('x>z6.1f', '-0.', 'xxx0.0'), ('🖤>z6.1f', '-0.', '🖤🖤🖤0.0'), # multi-byte fill char + ('\x00>z6.1f', '-0.', '\x00\x00\x000.0'), # null fill char + + # issue 114563 ('z' format on F type in cdecimal) + ('z3,.10F', '-6.24E-323', '0.0000000000'), + + # issue 91060 ('#' format in cdecimal) + ('#', '0', '0.'), # issue 6850 ('a=-7.0', '0.12345', 'aaaa0.1'), @@ -5647,6 +5655,9 @@ @unittest.skipIf(check_sanitizer(address=True, memory=True), "ASAN/MSAN sanitizer defaults to crashing " "instead of returning NULL for malloc failure.") + # gh-114331: The test allocates 784 271 641 GiB and mimalloc does not fail + # to allocate it when using mimalloc on s390x. + @skip_on_s390x def test_maxcontext_exact_arith(self): # Make sure that exact operations do not raise MemoryError due @@ -5712,6 +5723,21 @@ with self.assertRaisesRegex(ValueError, err_msg): sd.copy() + def test_format_fallback_capitals(self): + # Fallback to _pydecimal formatting (triggered by `#` format which + # is unsupported by mpdecimal) should honor the current context. 
+ x = C.Decimal('6.09e+23') + self.assertEqual(format(x, '#'), '6.09E+23') + with C.localcontext(capitals=0): + self.assertEqual(format(x, '#'), '6.09e+23') + + def test_format_fallback_rounding(self): + y = C.Decimal('6.09') + self.assertEqual(format(y, '#.1f'), '6.1') + with C.localcontext(rounding=C.ROUND_DOWN): + self.assertEqual(format(y, '#.1f'), '6.0') + + @requires_docstrings @requires_cdecimal class SignatureTest(unittest.TestCase): diff -Nru python3.11-3.11.8/Lib/test/test_deque.py python3.11-3.11.9/Lib/test/test_deque.py --- python3.11-3.11.8/Lib/test/test_deque.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_deque.py 2024-04-02 08:25:04.000000000 +0000 @@ -166,7 +166,7 @@ with self.assertRaises(RuntimeError): n in d - def test_contains_count_stop_crashes(self): + def test_contains_count_index_stop_crashes(self): class A: def __eq__(self, other): d.clear() @@ -178,6 +178,10 @@ with self.assertRaises(RuntimeError): _ = d.count(3) + d = deque([A()]) + with self.assertRaises(RuntimeError): + d.index(0) + def test_extend(self): d = deque('a') self.assertRaises(TypeError, d.extend, 1) diff -Nru python3.11-3.11.8/Lib/test/test_descr.py python3.11-3.11.9/Lib/test/test_descr.py --- python3.11-3.11.8/Lib/test/test_descr.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_descr.py 2024-04-02 08:25:04.000000000 +0000 @@ -1587,7 +1587,11 @@ cm = classmethod(f) cm_dict = {'__annotations__': {}, - '__doc__': "f docstring", + '__doc__': ( + "f docstring" + if support.HAVE_DOCSTRINGS + else None + ), '__module__': __name__, '__name__': 'f', '__qualname__': f.__qualname__} @@ -3252,12 +3256,8 @@ if otype: otype = otype.__name__ return 'object=%s; type=%s' % (object, otype) - class OldClass: + class NewClass: __doc__ = DocDescr() - class NewClass(object): - __doc__ = DocDescr() - self.assertEqual(OldClass.__doc__, 'object=None; type=OldClass') - self.assertEqual(OldClass().__doc__, 'object=OldClass instance; type=OldClass') self.assertEqual(NewClass.__doc__, 'object=None; type=NewClass') self.assertEqual(NewClass().__doc__, 'object=NewClass instance; type=NewClass') diff -Nru python3.11-3.11.8/Lib/test/test_descrtut.py python3.11-3.11.9/Lib/test/test_descrtut.py --- python3.11-3.11.8/Lib/test/test_descrtut.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_descrtut.py 2024-04-02 08:25:04.000000000 +0000 @@ -40,16 +40,16 @@ Here's the new type at work: >>> print(defaultdict) # show our type - <class 'test.test_descrtut.defaultdict'> + <class '%(modname)s.defaultdict'> >>> print(type(defaultdict)) # its metatype <class 'type'> >>> a = defaultdict(default=0.0) # create an instance >>> print(a) # show the instance {} >>> print(type(a)) # show its type - <class 'test.test_descrtut.defaultdict'> + <class '%(modname)s.defaultdict'> >>> print(a.__class__) # show its class - <class 'test.test_descrtut.defaultdict'> + <class '%(modname)s.defaultdict'> >>> print(type(a) is a.__class__) # its type is its class True >>> a[1] = 3.25 # modify the instance @@ -100,7 +100,7 @@ >>> print(sortdict(a.__dict__)) {'default': -1000, 'x1': 100, 'x2': 200} >>> -""" +""" % {'modname': __name__} class defaultdict2(dict): __slots__ = ['default'] @@ -264,19 +264,19 @@ ... print("classmethod", cls, y) >>> C.foo(1) - classmethod <class 'test.test_descrtut.C'> 1 + classmethod <class '%(modname)s.C'> 1 >>> c = C() >>> c.foo(1) - classmethod <class 'test.test_descrtut.C'> 1 + classmethod <class '%(modname)s.C'> 1 >>> class D(C): ...
pass >>> D.foo(1) - classmethod <class 'test.test_descrtut.D'> 1 + classmethod <class '%(modname)s.D'> 1 >>> d = D() >>> d.foo(1) - classmethod <class 'test.test_descrtut.D'> 1 + classmethod <class '%(modname)s.D'> 1 This prints "classmethod __main__.D 1" both times; in other words, the class passed as the first argument of foo() is the class involved in the @@ -292,18 +292,18 @@ >>> E.foo(1) E.foo() called - classmethod <class 'test.test_descrtut.C'> 1 + classmethod <class '%(modname)s.C'> 1 >>> e = E() >>> e.foo(1) E.foo() called - classmethod <class 'test.test_descrtut.C'> 1 + classmethod <class '%(modname)s.C'> 1 In this example, the call to C.foo() from E.foo() will see class C as its first argument, not class E. This is to be expected, since the call specifies the class C. But it stresses the difference between these class methods and methods defined in metaclasses (where an upcall to a metamethod would pass the target class as an explicit first argument). -""" +""" % {'modname': __name__} test_5 = """ diff -Nru python3.11-3.11.8/Lib/test/test_doctest/decorator_mod.py python3.11-3.11.9/Lib/test/test_doctest/decorator_mod.py --- python3.11-3.11.8/Lib/test/test_doctest/decorator_mod.py 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_doctest/decorator_mod.py 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,10 @@ +# This module is used in `doctest_lineno.py`. +import functools + + +def decorator(f): + @functools.wraps(f) + def inner(): + return f() + + return inner diff -Nru python3.11-3.11.8/Lib/test/test_doctest/doctest_lineno.py python3.11-3.11.9/Lib/test/test_doctest/doctest_lineno.py --- python3.11-3.11.8/Lib/test/test_doctest/doctest_lineno.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_doctest/doctest_lineno.py 2024-04-02 08:25:04.000000000 +0000 @@ -67,3 +67,12 @@ # https://github.com/python/cpython/issues/99433 str_wrapper = object().__str__ + + +# https://github.com/python/cpython/issues/115392 +from test.test_doctest.decorator_mod import decorator + +@decorator +@decorator +def func_with_docstring_wrapped(): + """Some unrelated info.""" diff -Nru python3.11-3.11.8/Lib/test/test_doctest/test_doctest.py python3.11-3.11.9/Lib/test/test_doctest/test_doctest.py --- python3.11-3.11.8/Lib/test/test_doctest/test_doctest.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_doctest/test_doctest.py 2024-04-02 08:25:04.000000000 +0000 @@ -687,6 +687,7 @@ None test.test_doctest.doctest_lineno.MethodWrapper.method_without_docstring 61 test.test_doctest.doctest_lineno.MethodWrapper.property_with_doctest 4 test.test_doctest.doctest_lineno.func_with_docstring + 77 test.test_doctest.doctest_lineno.func_with_docstring_wrapped 12 test.test_doctest.doctest_lineno.func_with_doctest None test.test_doctest.doctest_lineno.func_without_docstring diff -Nru python3.11-3.11.8/Lib/test/test_email/test__header_value_parser.py python3.11-3.11.9/Lib/test/test_email/test__header_value_parser.py --- python3.11-3.11.8/Lib/test/test_email/test__header_value_parser.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_email/test__header_value_parser.py 2024-04-02 08:25:04.000000000 +0000 @@ -2985,6 +2985,11 @@ '=?utf-8?q?H=C3=BCbsch?= Kaktus ,\n' ' =?utf-8?q?bei=C3=9Ft_bei=C3=9Ft?= \n') + def test_address_list_with_list_separator_after_fold(self): + to = '0123456789' * 8 + '@foo, ä ' + self._test(parser.get_address_list(to)[0], + '0123456789' * 8 + '@foo,\n =?utf-8?q?=C3=A4?= \n') + # XXX Need tests with comments on various sides of a unicode token, # and with unicode tokens in the comments. Spaces inside the quotes # currently don't do the right thing.
diff -Nru python3.11-3.11.8/Lib/test/test_email/test_email.py python3.11-3.11.9/Lib/test/test_email/test_email.py --- python3.11-3.11.8/Lib/test/test_email/test_email.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_email/test_email.py 2024-04-02 08:25:04.000000000 +0000 @@ -337,6 +337,21 @@ msg = email.message_from_bytes(source) self.assertEqual(msg.as_string(), expected) + def test_nonascii_as_string_with_ascii_charset(self): + m = textwrap.dedent("""\ + MIME-Version: 1.0 + Content-type: text/plain; charset="us-ascii" + Content-Transfer-Encoding: 8bit + + Test if non-ascii messages with no Content-Transfer-Encoding set + can be as_string'd: + Föö bär + """) + source = m.encode('iso-8859-1') + expected = source.decode('ascii', 'replace') + msg = email.message_from_bytes(source) + self.assertEqual(msg.as_string(), expected) + def test_nonascii_as_string_without_content_type_and_cte(self): m = textwrap.dedent("""\ MIME-Version: 1.0 diff -Nru python3.11-3.11.8/Lib/test/test_enum.py python3.11-3.11.9/Lib/test/test_enum.py --- python3.11-3.11.8/Lib/test/test_enum.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_enum.py 2024-04-02 08:25:04.000000000 +0000 @@ -903,6 +903,22 @@ class TestPlainFlag(_EnumTests, _PlainOutputTests, _FlagTests, unittest.TestCase): enum_type = Flag + def test_none_member(self): + class FlagWithNoneMember(Flag): + A = 1 + E = None + + self.assertEqual(FlagWithNoneMember.A.value, 1) + self.assertIs(FlagWithNoneMember.E.value, None) + with self.assertRaisesRegex(TypeError, r"'FlagWithNoneMember.E' cannot be combined with other flags with |"): + FlagWithNoneMember.A | FlagWithNoneMember.E + with self.assertRaisesRegex(TypeError, r"'FlagWithNoneMember.E' cannot be combined with other flags with &"): + FlagWithNoneMember.E & FlagWithNoneMember.A + with self.assertRaisesRegex(TypeError, r"'FlagWithNoneMember.E' cannot be combined with other flags with \^"): + FlagWithNoneMember.A ^ FlagWithNoneMember.E + with self.assertRaisesRegex(TypeError, r"'FlagWithNoneMember.E' cannot be inverted"): + ~FlagWithNoneMember.E + class TestIntEnum(_EnumTests, _MinimalOutputTests, unittest.TestCase): enum_type = IntEnum @@ -3574,6 +3590,8 @@ ) def test_global_enum_str(self): + self.assertEqual(repr(NoName.ONE), 'test_enum.ONE') + self.assertEqual(repr(NoName(0)), 'test_enum.NoName(0)') self.assertEqual(str(NoName.ONE & NoName.TWO), 'NoName(0)') self.assertEqual(str(NoName(0)), 'NoName(0)') @@ -4443,22 +4461,22 @@ | The value of the Enum member. |\x20\x20 | ---------------------------------------------------------------------- - | Methods inherited from enum.EnumType: + | Static methods inherited from enum.EnumType: |\x20\x20 - | __contains__(member) from enum.EnumType + | __contains__(member) | Return True if member is a member of this enum | raises TypeError if member is not an enum member |\x20\x20\x20\x20\x20\x20 | note: in 3.12 TypeError will no longer be raised, and True will also be | returned if member is the value of a member in this enum |\x20\x20 - | __getitem__(name) from enum.EnumType + | __getitem__(name) | Return the member matching `name`. |\x20\x20 - | __iter__() from enum.EnumType + | __iter__() | Return members in definition order. 
|\x20\x20 - | __len__() from enum.EnumType + | __len__() | Return the number of members (no aliases) |\x20\x20 | ---------------------------------------------------------------------- @@ -4474,7 +4492,7 @@ Help on class Color in module %s: class Color(enum.Enum) - | Color(value, names=None, *, module=None, qualname=None, type=None, start=1) + | Color(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None) |\x20\x20 | Method resolution order: | Color @@ -4483,11 +4501,11 @@ |\x20\x20 | Data and other attributes defined here: |\x20\x20 - | YELLOW = + | CYAN = |\x20\x20 | MAGENTA = |\x20\x20 - | CYAN = + | YELLOW = |\x20\x20 | ---------------------------------------------------------------------- | Data descriptors inherited from enum.Enum: @@ -4497,7 +4515,18 @@ | value |\x20\x20 | ---------------------------------------------------------------------- - | Data descriptors inherited from enum.EnumType: + | Static methods inherited from enum.EnumType: + |\x20\x20 + | __contains__(member) + |\x20\x20 + | __getitem__(name) + |\x20\x20 + | __iter__() + |\x20\x20 + | __len__() + |\x20\x20 + | ---------------------------------------------------------------------- + | Readonly properties inherited from enum.EnumType: |\x20\x20 | __members__""" diff -Nru python3.11-3.11.8/Lib/test/test_exceptions.py python3.11-3.11.9/Lib/test/test_exceptions.py --- python3.11-3.11.8/Lib/test/test_exceptions.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_exceptions.py 2024-04-02 08:25:04.000000000 +0000 @@ -234,7 +234,7 @@ check('Python = "\u1e54\xfd\u0163\u0125\xf2\xf1" +', 1, 20) check(b'# -*- coding: cp1251 -*-\nPython = "\xcf\xb3\xf2\xee\xed" +', 2, 19, encoding='cp1251') - check(b'Python = "\xcf\xb3\xf2\xee\xed" +', 1, 18) + check(b'Python = "\xcf\xb3\xf2\xee\xed" +', 1, 13) check('x = "a', 1, 5) check('lambda x: x = 2', 1, 1) check('f{a + b + c}', 1, 2) @@ -301,6 +301,7 @@ { 6 0="""''', 5, 13) + check('b"fooжжж"'.encode(), 1, 1, 1, 10) # Errors thrown by symtable.c check('x = [(yield i) for i in range(3)]', 1, 7) diff -Nru python3.11-3.11.8/Lib/test/test_ftplib.py python3.11-3.11.9/Lib/test/test_ftplib.py --- python3.11-3.11.8/Lib/test/test_ftplib.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_ftplib.py 2024-04-02 08:25:04.000000000 +0000 @@ -544,8 +544,8 @@ self.assertFalse(self.client.passiveserver) def test_voidcmd(self): - self.client.voidcmd('echo 200') - self.client.voidcmd('echo 299') + self.assertEqual(self.client.voidcmd('echo 200'), '200') + self.assertEqual(self.client.voidcmd('echo 299'), '299') self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 199') self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 300') diff -Nru python3.11-3.11.8/Lib/test/test_functools.py python3.11-3.11.9/Lib/test/test_functools.py --- python3.11-3.11.8/Lib/test/test_functools.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_functools.py 2024-04-02 08:25:04.000000000 +0000 @@ -2604,7 +2604,10 @@ A().static_func ): with self.subTest(meth=meth): - self.assertEqual(meth.__doc__, 'My function docstring') + self.assertEqual(meth.__doc__, + ('My function docstring' + if support.HAVE_DOCSTRINGS + else None)) self.assertEqual(meth.__annotations__['arg'], int) self.assertEqual(A.func.__name__, 'func') @@ -2693,7 +2696,10 @@ WithSingleDispatch().decorated_classmethod ): with self.subTest(meth=meth): - self.assertEqual(meth.__doc__, 'My function docstring') + self.assertEqual(meth.__doc__, + ('My 
function docstring' + if support.HAVE_DOCSTRINGS + else None)) self.assertEqual(meth.__annotations__['arg'], int) self.assertEqual( @@ -3057,7 +3063,10 @@ self.assertIsInstance(CachedCostItem.cost, py_functools.cached_property) def test_doc(self): - self.assertEqual(CachedCostItem.cost.__doc__, "The cost of the item.") + self.assertEqual(CachedCostItem.cost.__doc__, + ("The cost of the item." + if support.HAVE_DOCSTRINGS + else None)) if __name__ == '__main__': diff -Nru python3.11-3.11.8/Lib/test/test_gc.py python3.11-3.11.9/Lib/test/test_gc.py --- python3.11-3.11.8/Lib/test/test_gc.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_gc.py 2024-04-02 08:25:04.000000000 +0000 @@ -542,48 +542,6 @@ self.assertEqual(gc.collect(), 2) self.assertEqual(len(gc.garbage), garbagelen) - def test_boom_new(self): - # boom__new and boom2_new are exactly like boom and boom2, except use - # new-style classes. - - class Boom_New(object): - def __getattr__(self, someattribute): - del self.attr - raise AttributeError - - a = Boom_New() - b = Boom_New() - a.attr = b - b.attr = a - - gc.collect() - garbagelen = len(gc.garbage) - del a, b - self.assertEqual(gc.collect(), 2) - self.assertEqual(len(gc.garbage), garbagelen) - - def test_boom2_new(self): - class Boom2_New(object): - def __init__(self): - self.x = 0 - - def __getattr__(self, someattribute): - self.x += 1 - if self.x > 1: - del self.attr - raise AttributeError - - a = Boom2_New() - b = Boom2_New() - a.attr = b - b.attr = a - - gc.collect() - garbagelen = len(gc.garbage) - del a, b - self.assertEqual(gc.collect(), 2) - self.assertEqual(len(gc.garbage), garbagelen) - def test_get_referents(self): alist = [1, 3, 5] got = gc.get_referents(alist) diff -Nru python3.11-3.11.8/Lib/test/test_glob.py python3.11-3.11.9/Lib/test/test_glob.py --- python3.11-3.11.8/Lib/test/test_glob.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_glob.py 2024-04-02 08:25:04.000000000 +0000 @@ -40,6 +40,11 @@ os.symlink(self.norm('broken'), self.norm('sym1')) os.symlink('broken', self.norm('sym2')) os.symlink(os.path.join('a', 'bcd'), self.norm('sym3')) + self.open_dirfd() + + def open_dirfd(self): + if self.dir_fd is not None: + os.close(self.dir_fd) if {os.open, os.stat} <= os.supports_dir_fd and os.scandir in os.supports_fd: self.dir_fd = os.open(self.tempdir, os.O_RDONLY | os.O_DIRECTORY) else: @@ -332,6 +337,33 @@ eq(glob.glob('**', recursive=True, include_hidden=True), [join(*i) for i in full+rec]) + def test_glob_non_directory(self): + eq = self.assertSequencesEqual_noorder + eq(self.rglob('EF'), self.joins(('EF',))) + eq(self.rglob('EF', ''), []) + eq(self.rglob('EF', '*'), []) + eq(self.rglob('EF', '**'), []) + eq(self.rglob('nonexistent'), []) + eq(self.rglob('nonexistent', ''), []) + eq(self.rglob('nonexistent', '*'), []) + eq(self.rglob('nonexistent', '**'), []) + + @unittest.skipUnless(hasattr(os, "mkfifo"), 'requires os.mkfifo()') + @unittest.skipIf(sys.platform == "vxworks", + "fifo requires special path on VxWorks") + def test_glob_named_pipe(self): + path = os.path.join(self.tempdir, 'mypipe') + os.mkfifo(path) + + # gh-117127: Reopen self.dir_fd to pick up directory changes + self.open_dirfd() + + self.assertEqual(self.rglob('mypipe'), [path]) + self.assertEqual(self.rglob('mypipe*'), [path]) + self.assertEqual(self.rglob('mypipe', ''), []) + self.assertEqual(self.rglob('mypipe', 'sub'), []) + self.assertEqual(self.rglob('mypipe', '*'), []) + def test_glob_many_open_files(self): depth = 30 base = 
os.path.join(self.tempdir, 'deep') diff -Nru python3.11-3.11.8/Lib/test/test_gzip.py python3.11-3.11.9/Lib/test/test_gzip.py --- python3.11-3.11.8/Lib/test/test_gzip.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_gzip.py 2024-04-02 08:25:04.000000000 +0000 @@ -5,7 +5,6 @@ import functools import io import os -import pathlib import struct import sys import unittest @@ -78,16 +77,18 @@ f.close() def test_write_read_with_pathlike_file(self): - filename = pathlib.Path(self.filename) + filename = os_helper.FakePath(self.filename) with gzip.GzipFile(filename, 'w') as f: f.write(data1 * 50) self.assertIsInstance(f.name, str) + self.assertEqual(f.name, self.filename) with gzip.GzipFile(filename, 'a') as f: f.write(data1) with gzip.GzipFile(filename) as f: d = f.read() self.assertEqual(d, data1 * 51) self.assertIsInstance(f.name, str) + self.assertEqual(f.name, self.filename) # The following test_write_xy methods test that write accepts # the corresponding bytes-like object type as input @@ -471,13 +472,118 @@ with io.TextIOWrapper(f, encoding="ascii") as t: self.assertEqual(t.readlines(), lines) + def test_fileobj_with_name(self): + with open(self.filename, "xb") as raw: + with gzip.GzipFile(fileobj=raw, mode="x") as f: + f.write(b'one') + self.assertEqual(f.name, raw.name) + self.assertEqual(f.fileno(), raw.fileno()) + self.assertEqual(f.mode, gzip.WRITE) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertEqual(f.name, raw.name) + self.assertRaises(AttributeError, f.fileno) + self.assertEqual(f.mode, gzip.WRITE) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), True) + + with open(self.filename, "wb") as raw: + with gzip.GzipFile(fileobj=raw, mode="w") as f: + f.write(b'two') + self.assertEqual(f.name, raw.name) + self.assertEqual(f.fileno(), raw.fileno()) + self.assertEqual(f.mode, gzip.WRITE) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertEqual(f.name, raw.name) + self.assertRaises(AttributeError, f.fileno) + self.assertEqual(f.mode, gzip.WRITE) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), True) + + with open(self.filename, "ab") as raw: + with gzip.GzipFile(fileobj=raw, mode="a") as f: + f.write(b'three') + self.assertEqual(f.name, raw.name) + self.assertEqual(f.fileno(), raw.fileno()) + self.assertEqual(f.mode, gzip.WRITE) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertEqual(f.name, raw.name) + self.assertRaises(AttributeError, f.fileno) + self.assertEqual(f.mode, gzip.WRITE) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), True) + + with open(self.filename, "rb") as raw: + with gzip.GzipFile(fileobj=raw, mode="r") as f: + self.assertEqual(f.read(), b'twothree') + self.assertEqual(f.name, raw.name) + self.assertEqual(f.fileno(), raw.fileno()) + self.assertEqual(f.mode, gzip.READ) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertEqual(f.name, raw.name) + 
self.assertRaises(AttributeError, f.fileno) + self.assertEqual(f.mode, gzip.READ) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + def test_fileobj_from_fdopen(self): # Issue #13781: Opening a GzipFile for writing fails when using a # fileobj created with os.fdopen(). - fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT) - with os.fdopen(fd, "wb") as f: - with gzip.GzipFile(fileobj=f, mode="w") as g: - pass + fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT | os.O_EXCL) + with os.fdopen(fd, "xb") as raw: + with gzip.GzipFile(fileobj=raw, mode="x") as f: + f.write(b'one') + self.assertEqual(f.name, '') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertIs(f.closed, True) + self.assertEqual(f.name, '') + self.assertRaises(AttributeError, f.fileno) + + fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) + with os.fdopen(fd, "wb") as raw: + with gzip.GzipFile(fileobj=raw, mode="w") as f: + f.write(b'two') + self.assertEqual(f.name, '') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertEqual(f.name, '') + self.assertRaises(AttributeError, f.fileno) + + fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT | os.O_APPEND) + with os.fdopen(fd, "ab") as raw: + with gzip.GzipFile(fileobj=raw, mode="a") as f: + f.write(b'three') + self.assertEqual(f.name, '') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertEqual(f.name, '') + self.assertRaises(AttributeError, f.fileno) + + fd = os.open(self.filename, os.O_RDONLY) + with os.fdopen(fd, "rb") as raw: + with gzip.GzipFile(fileobj=raw, mode="r") as f: + self.assertEqual(f.read(), b'twothree') + self.assertEqual(f.name, '') + self.assertEqual(f.fileno(), raw.fileno()) + self.assertEqual(f.name, '') + self.assertRaises(AttributeError, f.fileno) def test_fileobj_mode(self): gzip.GzipFile(self.filename, "wb").close() @@ -507,17 +613,69 @@ def test_bytes_filename(self): str_filename = self.filename - try: - bytes_filename = str_filename.encode("ascii") - except UnicodeEncodeError: - self.skipTest("Temporary file name needs to be ASCII") + bytes_filename = os.fsencode(str_filename) with gzip.GzipFile(bytes_filename, "wb") as f: f.write(data1 * 50) + self.assertEqual(f.name, bytes_filename) with gzip.GzipFile(bytes_filename, "rb") as f: self.assertEqual(f.read(), data1 * 50) + self.assertEqual(f.name, bytes_filename) # Sanity check that we are actually operating on the right file. 
with gzip.GzipFile(str_filename, "rb") as f: self.assertEqual(f.read(), data1 * 50) + self.assertEqual(f.name, str_filename) + + def test_fileobj_without_name(self): + bio = io.BytesIO() + with gzip.GzipFile(fileobj=bio, mode='wb') as f: + f.write(data1 * 50) + self.assertEqual(f.name, '') + self.assertRaises(io.UnsupportedOperation, f.fileno) + self.assertEqual(f.mode, gzip.WRITE) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertEqual(f.name, '') + self.assertRaises(AttributeError, f.fileno) + self.assertEqual(f.mode, gzip.WRITE) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), True) + + bio.seek(0) + with gzip.GzipFile(fileobj=bio, mode='rb') as f: + self.assertEqual(f.read(), data1 * 50) + self.assertEqual(f.name, '') + self.assertRaises(io.UnsupportedOperation, f.fileno) + self.assertEqual(f.mode, gzip.READ) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertEqual(f.name, '') + self.assertRaises(AttributeError, f.fileno) + self.assertEqual(f.mode, gzip.READ) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + + def test_fileobj_and_filename(self): + filename2 = self.filename + 'new' + with (open(self.filename, 'wb') as fileobj, + gzip.GzipFile(fileobj=fileobj, filename=filename2, mode='wb') as f): + f.write(data1 * 50) + self.assertEqual(f.name, filename2) + with (open(self.filename, 'rb') as fileobj, + gzip.GzipFile(fileobj=fileobj, filename=filename2, mode='rb') as f): + self.assertEqual(f.read(), data1 * 50) + self.assertEqual(f.name, filename2) + # Sanity check that we are actually operating on the right file. + with gzip.GzipFile(self.filename, 'rb') as f: + self.assertEqual(f.read(), data1 * 50) + self.assertEqual(f.name, self.filename) def test_decompress_limited(self): """Decompressed data buffering should be limited""" @@ -646,13 +804,16 @@ self.assertEqual(file_data, uncompressed) def test_pathlike_file(self): - filename = pathlib.Path(self.filename) + filename = os_helper.FakePath(self.filename) with gzip.open(filename, "wb") as f: f.write(data1 * 50) + self.assertEqual(f.name, self.filename) with gzip.open(filename, "ab") as f: f.write(data1) + self.assertEqual(f.name, self.filename) with gzip.open(filename) as f: self.assertEqual(f.read(), data1 * 51) + self.assertEqual(f.name, self.filename) def test_implicit_binary_modes(self): # Test implicit binary modes (no "b" or "t" in mode string). 
diff -Nru python3.11-3.11.8/Lib/test/test_hmac.py python3.11-3.11.9/Lib/test/test_hmac.py --- python3.11-3.11.8/Lib/test/test_hmac.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_hmac.py 2024-04-02 08:25:04.000000000 +0000 @@ -479,6 +479,14 @@ self.fail("Exception raised during normal usage of HMAC class.") +class UpdateTestCase(unittest.TestCase): + @hashlib_helper.requires_hashdigest('sha256') + def test_with_str_update(self): + with self.assertRaises(TypeError): + h = hmac.new(b"key", digestmod='sha256') + h.update("invalid update") + + class CopyTestCase(unittest.TestCase): @hashlib_helper.requires_hashdigest('sha256') diff -Nru python3.11-3.11.8/Lib/test/test_httplib.py python3.11-3.11.9/Lib/test/test_httplib.py --- python3.11-3.11.8/Lib/test/test_httplib.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_httplib.py 2024-04-02 08:25:04.000000000 +0000 @@ -2267,6 +2267,22 @@ self.assertIn(b'CONNECT destination.com', self.conn.sock.data) self.assertIn(b'Host: destination.com', self.conn.sock.data) + def test_connect_put_request_ipv6(self): + self.conn.set_tunnel('[1:2:3::4]', 1234) + self.conn.request('PUT', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, client.HTTP_PORT) + self.assertIn(b'CONNECT [1:2:3::4]:1234', self.conn.sock.data) + self.assertIn(b'Host: [1:2:3::4]:1234', self.conn.sock.data) + + def test_connect_put_request_ipv6_port(self): + self.conn.set_tunnel('[1:2:3::4]:1234') + self.conn.request('PUT', '/', '') + self.assertEqual(self.conn.sock.host, self.host) + self.assertEqual(self.conn.sock.port, client.HTTP_PORT) + self.assertIn(b'CONNECT [1:2:3::4]:1234', self.conn.sock.data) + self.assertIn(b'Host: [1:2:3::4]:1234', self.conn.sock.data) + def test_tunnel_debuglog(self): expected_header = 'X-Dummy: 1' response_text = 'HTTP/1.0 200 OK\r\n{}\r\n\r\n'.format(expected_header) diff -Nru python3.11-3.11.8/Lib/test/test_imp.py python3.11-3.11.9/Lib/test/test_imp.py --- python3.11-3.11.8/Lib/test/test_imp.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_imp.py 2024-04-02 08:25:04.000000000 +0000 @@ -10,6 +10,7 @@ from test.support import os_helper from test.support import script_helper from test.support import warnings_helper +from test.support import is_wasi import unittest import warnings imp = warnings_helper.import_deprecated('imp') @@ -23,6 +24,8 @@ """Decorator to skip a test if not running under CPython or lacking imp.load_dynamic().""" meth = support.cpython_only(meth) + if is_wasi: + return unittest.skipIf(True, 'Not supoorted in WASI')(meth) return unittest.skipIf(getattr(imp, 'load_dynamic', None) is None, 'imp.load_dynamic() required')(meth) diff -Nru python3.11-3.11.8/Lib/test/test_importlib/extension/test_case_sensitivity.py python3.11-3.11.9/Lib/test/test_importlib/extension/test_case_sensitivity.py --- python3.11-3.11.8/Lib/test/test_importlib/extension/test_case_sensitivity.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_importlib/extension/test_case_sensitivity.py 2024-04-02 08:25:04.000000000 +0000 @@ -8,7 +8,8 @@ machinery = util.import_importlib('importlib.machinery') -@unittest.skipIf(util.EXTENSIONS.filename is None, '_testcapi not available') +@unittest.skipIf(util.EXTENSIONS is None or util.EXTENSIONS.filename is None, + 'dynamic loading not supported or test module not available') @util.case_insensitive_tests class ExtensionModuleCaseSensitivityTest(util.CASEOKTestBase): diff -Nru 
python3.11-3.11.8/Lib/test/test_importlib/extension/test_finder.py python3.11-3.11.9/Lib/test/test_importlib/extension/test_finder.py --- python3.11-3.11.8/Lib/test/test_importlib/extension/test_finder.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_importlib/extension/test_finder.py 2024-04-02 08:25:04.000000000 +0000 @@ -11,7 +11,7 @@ """Test the finder for extension modules.""" def setUp(self): - if not self.machinery.EXTENSION_SUFFIXES: + if not self.machinery.EXTENSION_SUFFIXES or not util.EXTENSIONS: raise unittest.SkipTest("Requires dynamic loading support.") if util.EXTENSIONS.name in sys.builtin_module_names: raise unittest.SkipTest( diff -Nru python3.11-3.11.8/Lib/test/test_importlib/extension/test_loader.py python3.11-3.11.9/Lib/test/test_importlib/extension/test_loader.py --- python3.11-3.11.8/Lib/test/test_importlib/extension/test_loader.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_importlib/extension/test_loader.py 2024-04-02 08:25:04.000000000 +0000 @@ -19,7 +19,7 @@ """Test load_module() for extension modules.""" def setUp(self): - if not self.machinery.EXTENSION_SUFFIXES: + if not self.machinery.EXTENSION_SUFFIXES or not util.EXTENSIONS: raise unittest.SkipTest("Requires dynamic loading support.") if util.EXTENSIONS.name in sys.builtin_module_names: raise unittest.SkipTest( @@ -99,7 +99,7 @@ # Test loading extension modules with multi-phase initialization (PEP 489). def setUp(self): - if not self.machinery.EXTENSION_SUFFIXES: + if not self.machinery.EXTENSION_SUFFIXES or not util.EXTENSIONS: raise unittest.SkipTest("Requires dynamic loading support.") self.name = '_testmultiphase' if self.name in sys.builtin_module_names: diff -Nru python3.11-3.11.8/Lib/test/test_importlib/extension/test_path_hook.py python3.11-3.11.9/Lib/test/test_importlib/extension/test_path_hook.py --- python3.11-3.11.8/Lib/test/test_importlib/extension/test_path_hook.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_importlib/extension/test_path_hook.py 2024-04-02 08:25:04.000000000 +0000 @@ -5,6 +5,8 @@ import unittest +@unittest.skipIf(util.EXTENSIONS is None or util.EXTENSIONS.filename is None, + 'dynamic loading not supported or test module not available') class PathHookTests: """Test the path hook for extension modules.""" diff -Nru python3.11-3.11.8/Lib/test/test_importlib/test_lazy.py python3.11-3.11.9/Lib/test/test_importlib/test_lazy.py --- python3.11-3.11.8/Lib/test/test_importlib/test_lazy.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_importlib/test_lazy.py 2024-04-02 08:25:04.000000000 +0000 @@ -2,9 +2,12 @@ from importlib import abc from importlib import util import sys +import time +import threading import types import unittest +from test.support import threading_helper from test.test_importlib import util as test_util @@ -40,6 +43,7 @@ module_name = 'lazy_loader_test' mutated_name = 'changed' loaded = None + load_count = 0 source_code = 'attr = 42; __name__ = {!r}'.format(mutated_name) def find_spec(self, name, path, target=None): @@ -48,8 +52,10 @@ return util.spec_from_loader(name, util.LazyLoader(self)) def exec_module(self, module): + time.sleep(0.01) # Simulate a slow load. exec(self.source_code, module.__dict__) self.loaded = module + self.load_count += 1 class LazyLoaderTests(unittest.TestCase): @@ -59,8 +65,9 @@ # Classes that don't define exec_module() trigger TypeError. 
util.LazyLoader(object) - def new_module(self, source_code=None): - loader = TestingImporter() + def new_module(self, source_code=None, loader=None): + if loader is None: + loader = TestingImporter() if source_code is not None: loader.source_code = source_code spec = util.spec_from_loader(TestingImporter.module_name, @@ -140,6 +147,55 @@ # Force the load; just care that no exception is raised. module.__name__ + @threading_helper.requires_working_threading() + def test_module_load_race(self): + with test_util.uncache(TestingImporter.module_name): + loader = TestingImporter() + module = self.new_module(loader=loader) + self.assertEqual(loader.load_count, 0) + + class RaisingThread(threading.Thread): + exc = None + def run(self): + try: + super().run() + except Exception as exc: + self.exc = exc + + def access_module(): + return module.attr + + threads = [] + for _ in range(2): + threads.append(thread := RaisingThread(target=access_module)) + thread.start() + + # Races could cause errors + for thread in threads: + thread.join() + self.assertIsNone(thread.exc) + + # Or multiple load attempts + self.assertEqual(loader.load_count, 1) + + def test_lazy_self_referential_modules(self): + # Directory modules with submodules that reference the parent can attempt to access + # the parent module during a load. Verify that this common pattern works with lazy loading. + # json is a good example in the stdlib. + json_modules = [name for name in sys.modules if name.startswith('json')] + with test_util.uncache(*json_modules): + # Standard lazy loading, unwrapped + spec = util.find_spec('json') + loader = util.LazyLoader(spec.loader) + spec.loader = loader + module = util.module_from_spec(spec) + sys.modules['json'] = module + loader.exec_module(module) + + # Trigger load with attribute lookup, ensure expected behavior + test_load = module.loads('{}') + self.assertEqual(test_load, {}) + if __name__ == '__main__': unittest.main() diff -Nru python3.11-3.11.8/Lib/test/test_importlib/test_spec.py python3.11-3.11.9/Lib/test/test_importlib/test_spec.py --- python3.11-3.11.8/Lib/test/test_importlib/test_spec.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_importlib/test_spec.py 2024-04-02 08:25:04.000000000 +0000 @@ -645,7 +645,8 @@ self.assertEqual(spec.loader, self.fileloader) self.assertEqual(spec.origin, self.path) self.assertIs(spec.loader_state, None) - self.assertEqual(spec.submodule_search_locations, [os.getcwd()]) + location = cwd if (cwd := os.getcwd()) != '/' else '' + self.assertEqual(spec.submodule_search_locations, [location]) self.assertEqual(spec.cached, self.cached) self.assertTrue(spec.has_location) @@ -744,7 +745,8 @@ self.assertEqual(spec.loader, self.fileloader) self.assertEqual(spec.origin, self.path) self.assertIs(spec.loader_state, None) - self.assertEqual(spec.submodule_search_locations, [os.getcwd()]) + location = cwd if (cwd := os.getcwd()) != '/' else '' + self.assertEqual(spec.submodule_search_locations, [location]) self.assertEqual(spec.cached, self.cached) self.assertTrue(spec.has_location) @@ -769,7 +771,8 @@ self.assertEqual(spec.loader, self.pkgloader) self.assertEqual(spec.origin, self.path) self.assertIs(spec.loader_state, None) - self.assertEqual(spec.submodule_search_locations, [os.getcwd()]) + location = cwd if (cwd := os.getcwd()) != '/' else '' + self.assertEqual(spec.submodule_search_locations, [location]) self.assertEqual(spec.cached, self.cached) self.assertTrue(spec.has_location) diff -Nru python3.11-3.11.8/Lib/test/test_importlib/test_util.py 
python3.11-3.11.9/Lib/test/test_importlib/test_util.py --- python3.11-3.11.8/Lib/test/test_importlib/test_util.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_importlib/test_util.py 2024-04-02 08:25:04.000000000 +0000 @@ -803,7 +803,7 @@ with util.temporary_pycache_prefix(pycache_prefix): self.assertEqual( self.util.cache_from_source(path, optimization=''), - expect) + os.path.normpath(expect)) @unittest.skipIf(sys.implementation.cache_tag is None, 'requires sys.implementation.cache_tag to not be None') diff -Nru python3.11-3.11.8/Lib/test/test_importlib/util.py python3.11-3.11.9/Lib/test/test_importlib/util.py --- python3.11-3.11.8/Lib/test/test_importlib/util.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_importlib/util.py 2024-04-02 08:25:04.000000000 +0000 @@ -6,6 +6,7 @@ import marshal import os import os.path +from test import support from test.support import import_helper from test.support import os_helper import unittest @@ -22,25 +23,34 @@ if 'importlib' not in sys.builtin_module_names: BUILTINS.bad_name = 'importlib' -EXTENSIONS = types.SimpleNamespace() -EXTENSIONS.path = None -EXTENSIONS.ext = None -EXTENSIONS.filename = None -EXTENSIONS.file_path = None -EXTENSIONS.name = '_testcapi' - -def _extension_details(): - global EXTENSIONS - for path in sys.path: - for ext in machinery.EXTENSION_SUFFIXES: - filename = EXTENSIONS.name + ext - file_path = os.path.join(path, filename) - if os.path.exists(file_path): - EXTENSIONS.path = path - EXTENSIONS.ext = ext - EXTENSIONS.filename = filename - EXTENSIONS.file_path = file_path - return +if support.is_wasi: + # dlopen() is a shim for WASI as of WASI SDK which fails by default. + # We don't provide an implementation, so tests will fail. + # But we also don't want to turn off dynamic loading for those that provide + # a working implementation. + def _extension_details(): + global EXTENSIONS + EXTENSIONS = None +else: + EXTENSIONS = types.SimpleNamespace() + EXTENSIONS.path = None + EXTENSIONS.ext = None + EXTENSIONS.filename = None + EXTENSIONS.file_path = None + EXTENSIONS.name = '_testcapi' + + def _extension_details(): + global EXTENSIONS + for path in sys.path: + for ext in machinery.EXTENSION_SUFFIXES: + filename = EXTENSIONS.name + ext + file_path = os.path.join(path, filename) + if os.path.exists(file_path): + EXTENSIONS.path = path + EXTENSIONS.ext = ext + EXTENSIONS.filename = filename + EXTENSIONS.file_path = file_path + return _extension_details() diff -Nru python3.11-3.11.8/Lib/test/test_inspect/test_inspect.py python3.11-3.11.9/Lib/test/test_inspect/test_inspect.py --- python3.11-3.11.8/Lib/test/test_inspect/test_inspect.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_inspect/test_inspect.py 2024-04-02 08:25:04.000000000 +0000 @@ -32,11 +32,11 @@ from test.support.script_helper import assert_python_ok, assert_python_failure from test import support -from . import inspect_fodder as mod -from . import inspect_fodder2 as mod2 -from . import inspect_stock_annotations -from . import inspect_stringized_annotations -from . import inspect_stringized_annotations_2 +from test.test_inspect import inspect_fodder as mod +from test.test_inspect import inspect_fodder2 as mod2 +from test.test_inspect import inspect_stock_annotations +from test.test_inspect import inspect_stringized_annotations +from test.test_inspect import inspect_stringized_annotations_2 # Functions tested in this suite: @@ -2569,9 +2569,12 @@ # This doesn't work now. 
# (We don't have a valid signature for "type" in 3.4) + class ThisWorksNow: + __call__ = type + # TODO: Support type. + self.assertEqual(ThisWorksNow()(1), int) + self.assertEqual(ThisWorksNow()('A', (), {}).__name__, 'A') with self.assertRaisesRegex(ValueError, "no signature found"): - class ThisWorksNow: - __call__ = type test_callable(ThisWorksNow()) # Regression test for issue #20786 @@ -3108,6 +3111,98 @@ ((('a', ..., ..., "positional_or_keyword"),), ...)) + with self.subTest('classmethod'): + class CM(type): + @classmethod + def __call__(cls, a): + return a + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(C(1), 1) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('staticmethod'): + class CM(type): + @staticmethod + def __call__(a): + return a + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(C(1), 1) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('MethodType'): + class A: + def call(self, a): + return a + class CM(type): + __call__ = A().call + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(C(1), 1) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('partial'): + class CM(type): + __call__ = functools.partial(lambda x, a: (x, a), 2) + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(C(1), (2, 1)) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('partialmethod'): + class CM(type): + __call__ = functools.partialmethod(lambda self, x, a: (x, a), 2) + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(C(1), (2, 1)) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('BuiltinMethodType'): + class CM(type): + __call__ = ':'.join + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(C(['a', 'bc']), 'a:bc') + # BUG: Returns '' + with self.assertRaises(AssertionError): + self.assertEqual(self.signature(C), self.signature(''.join)) + + with self.subTest('MethodWrapperType'): + class CM(type): + __call__ = (2).__pow__ + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(C(3), 8) + self.assertEqual(C(3, 7), 1) + # BUG: Returns '' + with self.assertRaises(AssertionError): + self.assertEqual(self.signature(C), self.signature((0).__pow__)) + class CM(type): def __new__(mcls, name, bases, dct, *, foo=1): return super().__new__(mcls, name, bases, dct) @@ -3169,6 +3264,169 @@ ('bar', 2, ..., "keyword_only")), ...)) + def test_signature_on_class_with_init(self): + class C: + def __init__(self, b): + pass + + C(1) # does not raise + self.assertEqual(self.signature(C), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('classmethod'): + class C: + @classmethod + def __init__(cls, b): + pass + + C(1) # does not raise + self.assertEqual(self.signature(C), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('staticmethod'): + class C: + @staticmethod + def __init__(b): + pass + + C(1) # does not raise + self.assertEqual(self.signature(C), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('MethodType'): + class A: + def call(self, a): + pass + class C: + __init__ = A().call + + C(1) # does not raise + 
self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('partial'): + class C: + __init__ = functools.partial(lambda x, a: None, 2) + + C(1) # does not raise + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('partialmethod'): + class C: + def _init(self, x, a): + self.a = (x, a) + __init__ = functools.partialmethod(_init, 2) + + self.assertEqual(C(1).a, (2, 1)) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + def test_signature_on_class_with_new(self): + with self.subTest('FunctionType'): + class C: + def __new__(cls, a): + return a + + self.assertEqual(C(1), 1) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('classmethod'): + class C: + @classmethod + def __new__(cls, cls2, a): + return a + + self.assertEqual(C(1), 1) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('staticmethod'): + class C: + @staticmethod + def __new__(cls, a): + return a + + self.assertEqual(C(1), 1) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('MethodType'): + class A: + def call(self, cls, a): + return a + class C: + __new__ = A().call + + self.assertEqual(C(1), 1) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('partial'): + class C: + __new__ = functools.partial(lambda x, cls, a: (x, a), 2) + + self.assertEqual(C(1), (2, 1)) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('partialmethod'): + class C: + __new__ = functools.partialmethod(lambda cls, x, a: (x, a), 2) + + self.assertEqual(C(1), (2, 1)) + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('BuiltinMethodType'): + class C: + __new__ = str.__subclasscheck__ + + self.assertEqual(C(), False) + # TODO: Support BuiltinMethodType + # self.assertEqual(self.signature(C), ((), ...)) + self.assertRaises(ValueError, self.signature, C) + + with self.subTest('MethodWrapperType'): + class C: + __new__ = type.__or__.__get__(int, type) + + self.assertEqual(C(), C | int) + # TODO: Support MethodWrapperType + # self.assertEqual(self.signature(C), ((), ...)) + self.assertRaises(ValueError, self.signature, C) + + # TODO: Test ClassMethodDescriptorType + + with self.subTest('MethodDescriptorType'): + class C: + __new__ = type.__dict__['__subclasscheck__'] + + self.assertEqual(C(C), True) + self.assertEqual(self.signature(C), self.signature(C.__subclasscheck__)) + + with self.subTest('WrapperDescriptorType'): + class C: + __new__ = type.__or__ + + self.assertEqual(C(int), C | int) + # TODO: Support WrapperDescriptorType + # self.assertEqual(self.signature(C), self.signature(C.__or__)) + self.assertRaises(ValueError, self.signature, C) + def test_signature_on_subclass(self): class A: def __new__(cls, a=1, *args, **kwargs): @@ -3222,8 +3480,11 @@ # Test meta-classes without user-defined __init__ or __new__ class C(type): pass class D(C): pass + self.assertEqual(C('A', (), {}).__name__, 'A') + # TODO: Support type. 
with self.assertRaisesRegex(ValueError, "callable.*is not supported"): self.assertEqual(inspect.signature(C), None) + self.assertEqual(D('A', (), {}).__name__, 'A') with self.assertRaisesRegex(ValueError, "callable.*is not supported"): self.assertEqual(inspect.signature(D), None) @@ -3273,16 +3534,117 @@ ((('a', ..., ..., "positional_or_keyword"),), ...)) - class Wrapped: - pass - Wrapped.__wrapped__ = lambda a: None - self.assertEqual(self.signature(Wrapped), + with self.subTest('classmethod'): + class C: + @classmethod + def __call__(cls, a): + pass + + self.assertEqual(self.signature(C()), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('staticmethod'): + class C: + @staticmethod + def __call__(a): + pass + + self.assertEqual(self.signature(C()), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('MethodType'): + class A: + def call(self, a): + return a + class C: + __call__ = A().call + + self.assertEqual(C()(1), 1) + self.assertEqual(self.signature(C()), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('partial'): + class C: + __call__ = functools.partial(lambda x, a: (x, a), 2) + + self.assertEqual(C()(1), (2, 1)) + self.assertEqual(self.signature(C()), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('partialmethod'): + class C: + __call__ = functools.partialmethod(lambda self, x, a: (x, a), 2) + + self.assertEqual(C()(1), (2, 1)) + self.assertEqual(self.signature(C()), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + with self.subTest('BuiltinMethodType'): + class C: + __call__ = ':'.join + + self.assertEqual(C()(['a', 'bc']), 'a:bc') + self.assertEqual(self.signature(C()), self.signature(''.join)) + + with self.subTest('MethodWrapperType'): + class C: + __call__ = (2).__pow__ + + self.assertEqual(C()(3), 8) + self.assertEqual(self.signature(C()), self.signature((0).__pow__)) + + with self.subTest('ClassMethodDescriptorType'): + class C(dict): + __call__ = dict.__dict__['fromkeys'] + + res = C()([1, 2], 3) + self.assertEqual(res, {1: 3, 2: 3}) + self.assertEqual(type(res), C) + self.assertEqual(self.signature(C()), self.signature(dict.fromkeys)) + + with self.subTest('MethodDescriptorType'): + class C(str): + __call__ = str.join + + self.assertEqual(C(':')(['a', 'bc']), 'a:bc') + self.assertEqual(self.signature(C()), self.signature(''.join)) + + with self.subTest('WrapperDescriptorType'): + class C(int): + __call__ = int.__pow__ + + self.assertEqual(C(2)(3), 8) + self.assertEqual(self.signature(C()), self.signature((0).__pow__)) + + with self.subTest('MemberDescriptorType'): + class C: + __slots__ = '__call__' + c = C() + c.__call__ = lambda a: a + self.assertEqual(c(1), 1) + self.assertEqual(self.signature(c), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + def test_signature_on_wrapper(self): + class Wrapper: + def __call__(self, b): + pass + wrapper = Wrapper() + wrapper.__wrapped__ = lambda a: None + self.assertEqual(self.signature(wrapper), ((('a', ..., ..., "positional_or_keyword"),), ...)) # wrapper loop: - Wrapped.__wrapped__ = Wrapped + wrapper = Wrapper() + wrapper.__wrapped__ = wrapper with self.assertRaisesRegex(ValueError, 'wrapper loop'): - self.signature(Wrapped) + self.signature(wrapper) def test_signature_on_lambdas(self): self.assertEqual(self.signature((lambda a=10: a)), @@ -4433,6 +4795,14 @@ with self.assertRaisesRegex(ValueError, 'wrapper loop'): inspect.unwrap(obj) + def test_wrapped_descriptor(self): + 
self.assertIs(inspect.unwrap(NTimesUnwrappable), NTimesUnwrappable) + self.assertIs(inspect.unwrap(staticmethod), staticmethod) + self.assertIs(inspect.unwrap(classmethod), classmethod) + self.assertIs(inspect.unwrap(staticmethod(classmethod)), classmethod) + self.assertIs(inspect.unwrap(classmethod(staticmethod)), staticmethod) + + class TestMain(unittest.TestCase): def test_only_source(self): module = importlib.import_module('unittest') diff -Nru python3.11-3.11.8/Lib/test/test_io.py python3.11-3.11.9/Lib/test/test_io.py --- python3.11-3.11.8/Lib/test/test_io.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_io.py 2024-04-02 08:25:04.000000000 +0000 @@ -263,6 +263,27 @@ UnsupportedOperation = pyio.UnsupportedOperation +class MockCharPseudoDevFileIO(MockFileIO): + # GH-95782 + # ftruncate() does not work on these special files (and CPython then raises + # appropriate exceptions), so truncate() does not have to be accounted for + # here. + def __init__(self, data): + super().__init__(data) + + def seek(self, *args): + return 0 + + def tell(self, *args): + return 0 + +class CMockCharPseudoDevFileIO(MockCharPseudoDevFileIO, io.BytesIO): + pass + +class PyMockCharPseudoDevFileIO(MockCharPseudoDevFileIO, pyio.BytesIO): + pass + + class MockNonBlockWriterIO: def __init__(self): @@ -1556,6 +1577,30 @@ self.assertRaises(self.UnsupportedOperation, bufio.truncate) self.assertRaises(self.UnsupportedOperation, bufio.truncate, 0) + def test_tell_character_device_file(self): + # GH-95782 + # For the (former) bug in BufferedIO to manifest, the wrapped IO obj + # must be able to produce at least 2 bytes. + raw = self.MockCharPseudoDevFileIO(b"12") + buf = self.tp(raw) + self.assertEqual(buf.tell(), 0) + self.assertEqual(buf.read(1), b"1") + self.assertEqual(buf.tell(), 0) + + def test_seek_character_device_file(self): + raw = self.MockCharPseudoDevFileIO(b"12") + buf = self.tp(raw) + self.assertEqual(buf.seek(0, io.SEEK_CUR), 0) + self.assertEqual(buf.seek(1, io.SEEK_SET), 0) + self.assertEqual(buf.seek(0, io.SEEK_CUR), 0) + self.assertEqual(buf.read(1), b"1") + + # In the C implementation, tell() sets the BufferedIO's abs_pos to 0, + # which means that the next seek() could return a negative offset if it + # does not sanity-check: + self.assertEqual(buf.tell(), 0) + self.assertEqual(buf.seek(0, io.SEEK_CUR), 0) + class CBufferedReaderTest(BufferedReaderTest, SizeofTest): tp = io.BufferedReader @@ -2403,6 +2448,28 @@ f.flush() self.assertEqual(raw.getvalue(), b'a2c') + def test_read1_after_write(self): + with self.BytesIO(b'abcdef') as raw: + with self.tp(raw, 3) as f: + f.write(b"1") + self.assertEqual(f.read1(1), b'b') + f.flush() + self.assertEqual(raw.getvalue(), b'1bcdef') + with self.BytesIO(b'abcdef') as raw: + with self.tp(raw, 3) as f: + f.write(b"1") + self.assertEqual(f.read1(), b'bcd') + f.flush() + self.assertEqual(raw.getvalue(), b'1bcdef') + with self.BytesIO(b'abcdef') as raw: + with self.tp(raw, 3) as f: + f.write(b"1") + # XXX: read(100) returns different numbers of bytes + # in Python and C implementations. + self.assertEqual(f.read1(100)[:3], b'bcd') + f.flush() + self.assertEqual(raw.getvalue(), b'1bcdef') + def test_interleaved_readline_write(self): with self.BytesIO(b'ab\ncdef\ng\n') as raw: with self.tp(raw) as f: @@ -4768,7 +4835,7 @@ # classes in the __dict__ of each test. 
mocks = (MockRawIO, MisbehavedRawIO, MockFileIO, CloseFailureIO, MockNonBlockWriterIO, MockUnseekableIO, MockRawIOWithoutRead, - SlowFlushRawIO) + SlowFlushRawIO, MockCharPseudoDevFileIO) all_members = io.__all__ c_io_ns = {name : getattr(io, name) for name in all_members} py_io_ns = {name : getattr(pyio, name) for name in all_members} diff -Nru python3.11-3.11.8/Lib/test/test_iter.py python3.11-3.11.9/Lib/test/test_iter.py --- python3.11-3.11.8/Lib/test/test_iter.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_iter.py 2024-04-02 08:25:04.000000000 +0000 @@ -302,7 +302,7 @@ # listiter_reduce_general self.assertEqual( run("reversed", orig["reversed"](list(range(8)))), - (iter, ([],)) + (reversed, ([],)) ) for case in types: diff -Nru python3.11-3.11.8/Lib/test/test_linecache.py python3.11-3.11.9/Lib/test/test_linecache.py --- python3.11-3.11.8/Lib/test/test_linecache.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_linecache.py 2024-04-02 08:25:04.000000000 +0000 @@ -5,6 +5,7 @@ import os.path import tempfile import tokenize +from importlib.machinery import ModuleSpec from test import support from test.support import os_helper @@ -97,6 +98,16 @@ file_byte_string = b'# coding=utf-8\n\x80abc' +class FakeLoader: + def get_source(self, fullname): + return f'source for {fullname}' + + +class NoSourceLoader: + def get_source(self, fullname): + return None + + class LineCacheTests(unittest.TestCase): def test_getline(self): @@ -238,6 +249,33 @@ self.assertEqual(lines3, []) self.assertEqual(linecache.getlines(FILENAME), lines) + def test_loader(self): + filename = 'scheme://path' + + for loader in (None, object(), NoSourceLoader()): + linecache.clearcache() + module_globals = {'__name__': 'a.b.c', '__loader__': loader} + self.assertEqual(linecache.getlines(filename, module_globals), []) + + linecache.clearcache() + module_globals = {'__name__': 'a.b.c', '__loader__': FakeLoader()} + self.assertEqual(linecache.getlines(filename, module_globals), + ['source for a.b.c\n']) + + for spec in (None, object(), ModuleSpec('', FakeLoader())): + linecache.clearcache() + module_globals = {'__name__': 'a.b.c', '__loader__': FakeLoader(), + '__spec__': spec} + self.assertEqual(linecache.getlines(filename, module_globals), + ['source for a.b.c\n']) + + linecache.clearcache() + spec = ModuleSpec('x.y.z', FakeLoader()) + module_globals = {'__name__': 'a.b.c', '__loader__': spec.loader, + '__spec__': spec} + self.assertEqual(linecache.getlines(filename, module_globals), + ['source for x.y.z\n']) + class LineCacheInvalidationTests(unittest.TestCase): def setUp(self): diff -Nru python3.11-3.11.8/Lib/test/test_logging.py python3.11-3.11.9/Lib/test/test_logging.py --- python3.11-3.11.8/Lib/test/test_logging.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_logging.py 2024-04-02 08:25:04.000000000 +0000 @@ -5075,6 +5075,7 @@ self.assertEqual(record.levelno, logging.CRITICAL) self.assertEqual(record.msg, msg) self.assertEqual(record.args, (self.recording,)) + self.assertEqual(record.funcName, 'test_critical') def test_is_enabled_for(self): old_disable = self.adapter.logger.manager.disable @@ -5093,15 +5094,9 @@ self.assertFalse(self.adapter.hasHandlers()) def test_nested(self): - class Adapter(logging.LoggerAdapter): - prefix = 'Adapter' - - def process(self, msg, kwargs): - return f"{self.prefix} {msg}", kwargs - msg = 'Adapters can be nested, yo.' 
- adapter = Adapter(logger=self.logger, extra=None) - adapter_adapter = Adapter(logger=adapter, extra=None) + adapter = PrefixAdapter(logger=self.logger, extra=None) + adapter_adapter = PrefixAdapter(logger=adapter, extra=None) adapter_adapter.prefix = 'AdapterAdapter' self.assertEqual(repr(adapter), repr(adapter_adapter)) adapter_adapter.log(logging.CRITICAL, msg, self.recording) @@ -5110,6 +5105,7 @@ self.assertEqual(record.levelno, logging.CRITICAL) self.assertEqual(record.msg, f"Adapter AdapterAdapter {msg}") self.assertEqual(record.args, (self.recording,)) + self.assertEqual(record.funcName, 'test_nested') orig_manager = adapter_adapter.manager self.assertIs(adapter.manager, orig_manager) self.assertIs(self.logger.manager, orig_manager) @@ -5125,6 +5121,101 @@ self.assertIs(adapter.manager, orig_manager) self.assertIs(self.logger.manager, orig_manager) + def test_styled_adapter(self): + # Test an example from the Cookbook. + records = self.recording.records + adapter = StyleAdapter(self.logger) + adapter.warning('Hello, {}!', 'world') + self.assertEqual(str(records[-1].msg), 'Hello, world!') + self.assertEqual(records[-1].funcName, 'test_styled_adapter') + adapter.log(logging.WARNING, 'Goodbye {}.', 'world') + self.assertEqual(str(records[-1].msg), 'Goodbye world.') + self.assertEqual(records[-1].funcName, 'test_styled_adapter') + + def test_nested_styled_adapter(self): + records = self.recording.records + adapter = PrefixAdapter(self.logger) + adapter.prefix = '{}' + adapter2 = StyleAdapter(adapter) + adapter2.warning('Hello, {}!', 'world') + self.assertEqual(str(records[-1].msg), '{} Hello, world!') + self.assertEqual(records[-1].funcName, 'test_nested_styled_adapter') + adapter2.log(logging.WARNING, 'Goodbye {}.', 'world') + self.assertEqual(str(records[-1].msg), '{} Goodbye world.') + self.assertEqual(records[-1].funcName, 'test_nested_styled_adapter') + + def test_find_caller_with_stacklevel(self): + the_level = 1 + trigger = self.adapter.warning + + def innermost(): + trigger('test', stacklevel=the_level) + + def inner(): + innermost() + + def outer(): + inner() + + records = self.recording.records + outer() + self.assertEqual(records[-1].funcName, 'innermost') + lineno = records[-1].lineno + the_level += 1 + outer() + self.assertEqual(records[-1].funcName, 'inner') + self.assertGreater(records[-1].lineno, lineno) + lineno = records[-1].lineno + the_level += 1 + outer() + self.assertEqual(records[-1].funcName, 'outer') + self.assertGreater(records[-1].lineno, lineno) + lineno = records[-1].lineno + the_level += 1 + outer() + self.assertEqual(records[-1].funcName, 'test_find_caller_with_stacklevel') + self.assertGreater(records[-1].lineno, lineno) + + def test_extra_in_records(self): + self.adapter = logging.LoggerAdapter(logger=self.logger, + extra={'foo': '1'}) + + self.adapter.critical('foo should be here') + self.assertEqual(len(self.recording.records), 1) + record = self.recording.records[0] + self.assertTrue(hasattr(record, 'foo')) + self.assertEqual(record.foo, '1') + + def test_extra_not_merged_by_default(self): + self.adapter.critical('foo should NOT be here', extra={'foo': 'nope'}) + self.assertEqual(len(self.recording.records), 1) + record = self.recording.records[0] + self.assertFalse(hasattr(record, 'foo')) + + +class PrefixAdapter(logging.LoggerAdapter): + prefix = 'Adapter' + + def process(self, msg, kwargs): + return f"{self.prefix} {msg}", kwargs + + +class Message: + def __init__(self, fmt, args): + self.fmt = fmt + self.args = args + + def __str__(self): + 
return self.fmt.format(*self.args) + + +class StyleAdapter(logging.LoggerAdapter): + def log(self, level, msg, /, *args, stacklevel=1, **kwargs): + if self.isEnabledFor(level): + msg, kwargs = self.process(msg, kwargs) + self.logger.log(level, Message(msg, args), **kwargs, + stacklevel=stacklevel+1) + class LoggerTest(BaseTest, AssertErrorMessage): @@ -5563,6 +5654,52 @@ print(tf.read()) self.assertTrue(found, msg=msg) + def test_rollover_at_midnight(self, weekly=False): + os_helper.unlink(self.fn) + now = datetime.datetime.now() + atTime = now.time() + if not 0.1 < atTime.microsecond/1e6 < 0.9: + # The test requires all records to be emitted within + # the range of the same whole second. + time.sleep((0.1 - atTime.microsecond/1e6) % 1.0) + now = datetime.datetime.now() + atTime = now.time() + atTime = atTime.replace(microsecond=0) + fmt = logging.Formatter('%(asctime)s %(message)s') + when = f'W{now.weekday()}' if weekly else 'MIDNIGHT' + for i in range(3): + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when=when, atTime=atTime) + fh.setFormatter(fmt) + r2 = logging.makeLogRecord({'msg': f'testing1 {i}'}) + fh.emit(r2) + fh.close() + self.assertLogFile(self.fn) + with open(self.fn, encoding="utf-8") as f: + for i, line in enumerate(f): + self.assertIn(f'testing1 {i}', line) + + os.utime(self.fn, (now.timestamp() - 1,)*2) + for i in range(2): + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when=when, atTime=atTime) + fh.setFormatter(fmt) + r2 = logging.makeLogRecord({'msg': f'testing2 {i}'}) + fh.emit(r2) + fh.close() + rolloverDate = now - datetime.timedelta(days=7 if weekly else 1) + otherfn = f'{self.fn}.{rolloverDate:%Y-%m-%d}' + self.assertLogFile(otherfn) + with open(self.fn, encoding="utf-8") as f: + for i, line in enumerate(f): + self.assertIn(f'testing2 {i}', line) + with open(otherfn, encoding="utf-8") as f: + for i, line in enumerate(f): + self.assertIn(f'testing1 {i}', line) + + def test_rollover_at_weekday(self): + self.test_rollover_at_midnight(weekly=True) + def test_invalid(self): assertRaises = self.assertRaises assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler, @@ -5572,22 +5709,47 @@ assertRaises(ValueError, logging.handlers.TimedRotatingFileHandler, self.fn, 'W7', encoding="utf-8", delay=True) + # TODO: Test for utc=False. 
def test_compute_rollover_daily_attime(self): currentTime = 0 + rh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='MIDNIGHT', + utc=True, atTime=None) + try: + actual = rh.computeRollover(currentTime) + self.assertEqual(actual, currentTime + 24 * 60 * 60) + + actual = rh.computeRollover(currentTime + 24 * 60 * 60 - 1) + self.assertEqual(actual, currentTime + 24 * 60 * 60) + + actual = rh.computeRollover(currentTime + 24 * 60 * 60) + self.assertEqual(actual, currentTime + 48 * 60 * 60) + + actual = rh.computeRollover(currentTime + 25 * 60 * 60) + self.assertEqual(actual, currentTime + 48 * 60 * 60) + finally: + rh.close() + atTime = datetime.time(12, 0, 0) rh = logging.handlers.TimedRotatingFileHandler( - self.fn, encoding="utf-8", when='MIDNIGHT', interval=1, backupCount=0, + self.fn, encoding="utf-8", when='MIDNIGHT', utc=True, atTime=atTime) try: actual = rh.computeRollover(currentTime) self.assertEqual(actual, currentTime + 12 * 60 * 60) + actual = rh.computeRollover(currentTime + 12 * 60 * 60 - 1) + self.assertEqual(actual, currentTime + 12 * 60 * 60) + + actual = rh.computeRollover(currentTime + 12 * 60 * 60) + self.assertEqual(actual, currentTime + 36 * 60 * 60) + actual = rh.computeRollover(currentTime + 13 * 60 * 60) self.assertEqual(actual, currentTime + 36 * 60 * 60) finally: rh.close() - #@unittest.skipIf(True, 'Temporarily skipped while failures investigated.') + # TODO: Test for utc=False. def test_compute_rollover_weekly_attime(self): currentTime = int(time.time()) today = currentTime - currentTime % 86400 @@ -5612,14 +5774,28 @@ expected += 12 * 60 * 60 # Add in adjustment for today expected += today + actual = rh.computeRollover(today) if actual != expected: print('failed in timezone: %d' % time.timezone) print('local vars: %s' % locals()) self.assertEqual(actual, expected) + + actual = rh.computeRollover(today + 12 * 60 * 60 - 1) + if actual != expected: + print('failed in timezone: %d' % time.timezone) + print('local vars: %s' % locals()) + self.assertEqual(actual, expected) + if day == wday: # goes into following week expected += 7 * 24 * 60 * 60 + actual = rh.computeRollover(today + 12 * 60 * 60) + if actual != expected: + print('failed in timezone: %d' % time.timezone) + print('local vars: %s' % locals()) + self.assertEqual(actual, expected) + actual = rh.computeRollover(today + 13 * 60 * 60) if actual != expected: print('failed in timezone: %d' % time.timezone) @@ -5637,7 +5813,7 @@ for i in range(10): times.append(dt.strftime('%Y-%m-%d_%H-%M-%S')) dt += datetime.timedelta(seconds=5) - prefixes = ('a.b', 'a.b.c', 'd.e', 'd.e.f') + prefixes = ('a.b', 'a.b.c', 'd.e', 'd.e.f', 'g') files = [] rotators = [] for prefix in prefixes: @@ -5650,10 +5826,22 @@ if prefix.startswith('a.b'): for t in times: files.append('%s.log.%s' % (prefix, t)) - else: - rotator.namer = lambda name: name.replace('.log', '') + '.log' + elif prefix.startswith('d.e'): + def namer(filename): + dirname, basename = os.path.split(filename) + basename = basename.replace('.log', '') + '.log' + return os.path.join(dirname, basename) + rotator.namer = namer for t in times: files.append('%s.%s.log' % (prefix, t)) + elif prefix == 'g': + def namer(filename): + dirname, basename = os.path.split(filename) + basename = 'g' + basename[6:] + '.oldlog' + return os.path.join(dirname, basename) + rotator.namer = namer + for t in times: + files.append('g%s.oldlog' % t) # Create empty files for fn in files: p = os.path.join(wd, fn) @@ -5663,18 +5851,423 @@ for i, prefix in 
enumerate(prefixes): rotator = rotators[i] candidates = rotator.getFilesToDelete() - self.assertEqual(len(candidates), 3) + self.assertEqual(len(candidates), 3, candidates) if prefix.startswith('a.b'): p = '%s.log.' % prefix for c in candidates: d, fn = os.path.split(c) self.assertTrue(fn.startswith(p)) - else: + elif prefix.startswith('d.e'): for c in candidates: d, fn = os.path.split(c) - self.assertTrue(fn.endswith('.log')) + self.assertTrue(fn.endswith('.log'), fn) self.assertTrue(fn.startswith(prefix + '.') and fn[len(prefix) + 2].isdigit()) + elif prefix == 'g': + for c in candidates: + d, fn = os.path.split(c) + self.assertTrue(fn.endswith('.oldlog')) + self.assertTrue(fn.startswith('g') and fn[1].isdigit()) + + def test_compute_files_to_delete_same_filename_different_extensions(self): + # See GH-93205 for background + wd = pathlib.Path(tempfile.mkdtemp(prefix='test_logging_')) + self.addCleanup(shutil.rmtree, wd) + times = [] + dt = datetime.datetime.now() + n_files = 10 + for _ in range(n_files): + times.append(dt.strftime('%Y-%m-%d_%H-%M-%S')) + dt += datetime.timedelta(seconds=5) + prefixes = ('a.log', 'a.log.b') + files = [] + rotators = [] + for i, prefix in enumerate(prefixes): + backupCount = i+1 + rotator = logging.handlers.TimedRotatingFileHandler(wd / prefix, when='s', + interval=5, + backupCount=backupCount, + delay=True) + rotators.append(rotator) + for t in times: + files.append('%s.%s' % (prefix, t)) + for t in times: + files.append('a.log.%s.c' % t) + # Create empty files + for f in files: + (wd / f).touch() + # Now the checks that only the correct files are offered up for deletion + for i, prefix in enumerate(prefixes): + backupCount = i+1 + rotator = rotators[i] + candidates = rotator.getFilesToDelete() + self.assertEqual(len(candidates), n_files - backupCount, candidates) + matcher = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}\Z") + for c in candidates: + d, fn = os.path.split(c) + self.assertTrue(fn.startswith(prefix+'.')) + suffix = fn[(len(prefix)+1):] + self.assertRegex(suffix, matcher) + + # Run with US-style DST rules: DST begins 2 a.m. on second Sunday in + # March (M3.2.0) and ends 2 a.m. on first Sunday in November (M11.1.0). + @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') + def test_compute_rollover_MIDNIGHT_local(self): + # DST begins at 2012-3-11T02:00:00 and ends at 2012-11-4T02:00:00. 
+ DT = datetime.datetime + def test(current, expected): + actual = fh.computeRollover(current.timestamp()) + diff = actual - expected.timestamp() + if diff: + self.assertEqual(diff, 0, datetime.timedelta(seconds=diff)) + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='MIDNIGHT', utc=False) + + test(DT(2012, 3, 10, 23, 59, 59), DT(2012, 3, 11, 0, 0)) + test(DT(2012, 3, 11, 0, 0), DT(2012, 3, 12, 0, 0)) + test(DT(2012, 3, 11, 1, 0), DT(2012, 3, 12, 0, 0)) + + test(DT(2012, 11, 3, 23, 59, 59), DT(2012, 11, 4, 0, 0)) + test(DT(2012, 11, 4, 0, 0), DT(2012, 11, 5, 0, 0)) + test(DT(2012, 11, 4, 1, 0), DT(2012, 11, 5, 0, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='MIDNIGHT', utc=False, + atTime=datetime.time(12, 0, 0)) + + test(DT(2012, 3, 10, 11, 59, 59), DT(2012, 3, 10, 12, 0)) + test(DT(2012, 3, 10, 12, 0), DT(2012, 3, 11, 12, 0)) + test(DT(2012, 3, 10, 13, 0), DT(2012, 3, 11, 12, 0)) + + test(DT(2012, 11, 3, 11, 59, 59), DT(2012, 11, 3, 12, 0)) + test(DT(2012, 11, 3, 12, 0), DT(2012, 11, 4, 12, 0)) + test(DT(2012, 11, 3, 13, 0), DT(2012, 11, 4, 12, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='MIDNIGHT', utc=False, + atTime=datetime.time(2, 0, 0)) + + test(DT(2012, 3, 10, 1, 59, 59), DT(2012, 3, 10, 2, 0)) + # 2:00:00 is the same as 3:00:00 at 2012-3-11. + test(DT(2012, 3, 10, 2, 0), DT(2012, 3, 11, 3, 0)) + test(DT(2012, 3, 10, 3, 0), DT(2012, 3, 11, 3, 0)) + + test(DT(2012, 3, 11, 1, 59, 59), DT(2012, 3, 11, 3, 0)) + # No time between 2:00:00 and 3:00:00 at 2012-3-11. + test(DT(2012, 3, 11, 3, 0), DT(2012, 3, 12, 2, 0)) + test(DT(2012, 3, 11, 4, 0), DT(2012, 3, 12, 2, 0)) + + test(DT(2012, 11, 3, 1, 59, 59), DT(2012, 11, 3, 2, 0)) + test(DT(2012, 11, 3, 2, 0), DT(2012, 11, 4, 2, 0)) + test(DT(2012, 11, 3, 3, 0), DT(2012, 11, 4, 2, 0)) + + # 1:00:00-2:00:00 is repeated twice at 2012-11-4. + test(DT(2012, 11, 4, 1, 59, 59), DT(2012, 11, 4, 2, 0)) + test(DT(2012, 11, 4, 1, 59, 59, fold=1), DT(2012, 11, 4, 2, 0)) + test(DT(2012, 11, 4, 2, 0), DT(2012, 11, 5, 2, 0)) + test(DT(2012, 11, 4, 3, 0), DT(2012, 11, 5, 2, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='MIDNIGHT', utc=False, + atTime=datetime.time(2, 30, 0)) + + test(DT(2012, 3, 10, 2, 29, 59), DT(2012, 3, 10, 2, 30)) + # No time 2:30:00 at 2012-3-11. + test(DT(2012, 3, 10, 2, 30), DT(2012, 3, 11, 3, 30)) + test(DT(2012, 3, 10, 3, 0), DT(2012, 3, 11, 3, 30)) + + test(DT(2012, 3, 11, 1, 59, 59), DT(2012, 3, 11, 3, 30)) + # No time between 2:00:00 and 3:00:00 at 2012-3-11. + test(DT(2012, 3, 11, 3, 0), DT(2012, 3, 12, 2, 30)) + test(DT(2012, 3, 11, 3, 30), DT(2012, 3, 12, 2, 30)) + + test(DT(2012, 11, 3, 2, 29, 59), DT(2012, 11, 3, 2, 30)) + test(DT(2012, 11, 3, 2, 30), DT(2012, 11, 4, 2, 30)) + test(DT(2012, 11, 3, 3, 0), DT(2012, 11, 4, 2, 30)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='MIDNIGHT', utc=False, + atTime=datetime.time(1, 30, 0)) + + test(DT(2012, 3, 11, 1, 29, 59), DT(2012, 3, 11, 1, 30)) + test(DT(2012, 3, 11, 1, 30), DT(2012, 3, 12, 1, 30)) + test(DT(2012, 3, 11, 1, 59, 59), DT(2012, 3, 12, 1, 30)) + # No time between 2:00:00 and 3:00:00 at 2012-3-11. + test(DT(2012, 3, 11, 3, 0), DT(2012, 3, 12, 1, 30)) + test(DT(2012, 3, 11, 3, 30), DT(2012, 3, 12, 1, 30)) + + # 1:00:00-2:00:00 is repeated twice at 2012-11-4. 
+ test(DT(2012, 11, 4, 1, 0), DT(2012, 11, 4, 1, 30)) + test(DT(2012, 11, 4, 1, 29, 59), DT(2012, 11, 4, 1, 30)) + test(DT(2012, 11, 4, 1, 30), DT(2012, 11, 5, 1, 30)) + test(DT(2012, 11, 4, 1, 59, 59), DT(2012, 11, 5, 1, 30)) + # It is weird, but the rollover date jumps back from 2012-11-5 + # to 2012-11-4. + test(DT(2012, 11, 4, 1, 0, fold=1), DT(2012, 11, 4, 1, 30, fold=1)) + test(DT(2012, 11, 4, 1, 29, 59, fold=1), DT(2012, 11, 4, 1, 30, fold=1)) + test(DT(2012, 11, 4, 1, 30, fold=1), DT(2012, 11, 5, 1, 30)) + test(DT(2012, 11, 4, 1, 59, 59, fold=1), DT(2012, 11, 5, 1, 30)) + test(DT(2012, 11, 4, 2, 0), DT(2012, 11, 5, 1, 30)) + test(DT(2012, 11, 4, 2, 30), DT(2012, 11, 5, 1, 30)) + + fh.close() + + # Run with US-style DST rules: DST begins 2 a.m. on second Sunday in + # March (M3.2.0) and ends 2 a.m. on first Sunday in November (M11.1.0). + @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') + def test_compute_rollover_W6_local(self): + # DST begins at 2012-3-11T02:00:00 and ends at 2012-11-4T02:00:00. + DT = datetime.datetime + def test(current, expected): + actual = fh.computeRollover(current.timestamp()) + diff = actual - expected.timestamp() + if diff: + self.assertEqual(diff, 0, datetime.timedelta(seconds=diff)) + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False) + + test(DT(2012, 3, 4, 23, 59, 59), DT(2012, 3, 5, 0, 0)) + test(DT(2012, 3, 5, 0, 0), DT(2012, 3, 12, 0, 0)) + test(DT(2012, 3, 5, 1, 0), DT(2012, 3, 12, 0, 0)) + + test(DT(2012, 10, 28, 23, 59, 59), DT(2012, 10, 29, 0, 0)) + test(DT(2012, 10, 29, 0, 0), DT(2012, 11, 5, 0, 0)) + test(DT(2012, 10, 29, 1, 0), DT(2012, 11, 5, 0, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False, + atTime=datetime.time(0, 0, 0)) + + test(DT(2012, 3, 10, 23, 59, 59), DT(2012, 3, 11, 0, 0)) + test(DT(2012, 3, 11, 0, 0), DT(2012, 3, 18, 0, 0)) + test(DT(2012, 3, 11, 1, 0), DT(2012, 3, 18, 0, 0)) + + test(DT(2012, 11, 3, 23, 59, 59), DT(2012, 11, 4, 0, 0)) + test(DT(2012, 11, 4, 0, 0), DT(2012, 11, 11, 0, 0)) + test(DT(2012, 11, 4, 1, 0), DT(2012, 11, 11, 0, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False, + atTime=datetime.time(12, 0, 0)) + + test(DT(2012, 3, 4, 11, 59, 59), DT(2012, 3, 4, 12, 0)) + test(DT(2012, 3, 4, 12, 0), DT(2012, 3, 11, 12, 0)) + test(DT(2012, 3, 4, 13, 0), DT(2012, 3, 11, 12, 0)) + + test(DT(2012, 10, 28, 11, 59, 59), DT(2012, 10, 28, 12, 0)) + test(DT(2012, 10, 28, 12, 0), DT(2012, 11, 4, 12, 0)) + test(DT(2012, 10, 28, 13, 0), DT(2012, 11, 4, 12, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False, + atTime=datetime.time(2, 0, 0)) + + test(DT(2012, 3, 4, 1, 59, 59), DT(2012, 3, 4, 2, 0)) + # 2:00:00 is the same as 3:00:00 at 2012-3-11. + test(DT(2012, 3, 4, 2, 0), DT(2012, 3, 11, 3, 0)) + test(DT(2012, 3, 4, 3, 0), DT(2012, 3, 11, 3, 0)) + + test(DT(2012, 3, 11, 1, 59, 59), DT(2012, 3, 11, 3, 0)) + # No time between 2:00:00 and 3:00:00 at 2012-3-11. + test(DT(2012, 3, 11, 3, 0), DT(2012, 3, 18, 2, 0)) + test(DT(2012, 3, 11, 4, 0), DT(2012, 3, 18, 2, 0)) + + test(DT(2012, 10, 28, 1, 59, 59), DT(2012, 10, 28, 2, 0)) + test(DT(2012, 10, 28, 2, 0), DT(2012, 11, 4, 2, 0)) + test(DT(2012, 10, 28, 3, 0), DT(2012, 11, 4, 2, 0)) + + # 1:00:00-2:00:00 is repeated twice at 2012-11-4. 
+ test(DT(2012, 11, 4, 1, 59, 59), DT(2012, 11, 4, 2, 0)) + test(DT(2012, 11, 4, 1, 59, 59, fold=1), DT(2012, 11, 4, 2, 0)) + test(DT(2012, 11, 4, 2, 0), DT(2012, 11, 11, 2, 0)) + test(DT(2012, 11, 4, 3, 0), DT(2012, 11, 11, 2, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False, + atTime=datetime.time(2, 30, 0)) + + test(DT(2012, 3, 4, 2, 29, 59), DT(2012, 3, 4, 2, 30)) + # No time 2:30:00 at 2012-3-11. + test(DT(2012, 3, 4, 2, 30), DT(2012, 3, 11, 3, 30)) + test(DT(2012, 3, 4, 3, 0), DT(2012, 3, 11, 3, 30)) + + test(DT(2012, 3, 11, 1, 59, 59), DT(2012, 3, 11, 3, 30)) + # No time between 2:00:00 and 3:00:00 at 2012-3-11. + test(DT(2012, 3, 11, 3, 0), DT(2012, 3, 18, 2, 30)) + test(DT(2012, 3, 11, 3, 30), DT(2012, 3, 18, 2, 30)) + + test(DT(2012, 10, 28, 2, 29, 59), DT(2012, 10, 28, 2, 30)) + test(DT(2012, 10, 28, 2, 30), DT(2012, 11, 4, 2, 30)) + test(DT(2012, 10, 28, 3, 0), DT(2012, 11, 4, 2, 30)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False, + atTime=datetime.time(1, 30, 0)) + + test(DT(2012, 3, 11, 1, 29, 59), DT(2012, 3, 11, 1, 30)) + test(DT(2012, 3, 11, 1, 30), DT(2012, 3, 18, 1, 30)) + test(DT(2012, 3, 11, 1, 59, 59), DT(2012, 3, 18, 1, 30)) + # No time between 2:00:00 and 3:00:00 at 2012-3-11. + test(DT(2012, 3, 11, 3, 0), DT(2012, 3, 18, 1, 30)) + test(DT(2012, 3, 11, 3, 30), DT(2012, 3, 18, 1, 30)) + + # 1:00:00-2:00:00 is repeated twice at 2012-11-4. + test(DT(2012, 11, 4, 1, 0), DT(2012, 11, 4, 1, 30)) + test(DT(2012, 11, 4, 1, 29, 59), DT(2012, 11, 4, 1, 30)) + test(DT(2012, 11, 4, 1, 30), DT(2012, 11, 11, 1, 30)) + test(DT(2012, 11, 4, 1, 59, 59), DT(2012, 11, 11, 1, 30)) + # It is weird, but the rollover date jumps back from 2012-11-11 + # to 2012-11-4. + test(DT(2012, 11, 4, 1, 0, fold=1), DT(2012, 11, 4, 1, 30, fold=1)) + test(DT(2012, 11, 4, 1, 29, 59, fold=1), DT(2012, 11, 4, 1, 30, fold=1)) + test(DT(2012, 11, 4, 1, 30, fold=1), DT(2012, 11, 11, 1, 30)) + test(DT(2012, 11, 4, 1, 59, 59, fold=1), DT(2012, 11, 11, 1, 30)) + test(DT(2012, 11, 4, 2, 0), DT(2012, 11, 11, 1, 30)) + test(DT(2012, 11, 4, 2, 30), DT(2012, 11, 11, 1, 30)) + + fh.close() + + # Run with US-style DST rules: DST begins 2 a.m. on second Sunday in + # March (M3.2.0) and ends 2 a.m. on first Sunday in November (M11.1.0). + @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') + def test_compute_rollover_MIDNIGHT_local_interval(self): + # DST begins at 2012-3-11T02:00:00 and ends at 2012-11-4T02:00:00. 
+ DT = datetime.datetime + def test(current, expected): + actual = fh.computeRollover(current.timestamp()) + diff = actual - expected.timestamp() + if diff: + self.assertEqual(diff, 0, datetime.timedelta(seconds=diff)) + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='MIDNIGHT', utc=False, interval=3) + + test(DT(2012, 3, 8, 23, 59, 59), DT(2012, 3, 11, 0, 0)) + test(DT(2012, 3, 9, 0, 0), DT(2012, 3, 12, 0, 0)) + test(DT(2012, 3, 9, 1, 0), DT(2012, 3, 12, 0, 0)) + test(DT(2012, 3, 10, 23, 59, 59), DT(2012, 3, 13, 0, 0)) + test(DT(2012, 3, 11, 0, 0), DT(2012, 3, 14, 0, 0)) + test(DT(2012, 3, 11, 1, 0), DT(2012, 3, 14, 0, 0)) + + test(DT(2012, 11, 1, 23, 59, 59), DT(2012, 11, 4, 0, 0)) + test(DT(2012, 11, 2, 0, 0), DT(2012, 11, 5, 0, 0)) + test(DT(2012, 11, 2, 1, 0), DT(2012, 11, 5, 0, 0)) + test(DT(2012, 11, 3, 23, 59, 59), DT(2012, 11, 6, 0, 0)) + test(DT(2012, 11, 4, 0, 0), DT(2012, 11, 7, 0, 0)) + test(DT(2012, 11, 4, 1, 0), DT(2012, 11, 7, 0, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='MIDNIGHT', utc=False, interval=3, + atTime=datetime.time(12, 0, 0)) + + test(DT(2012, 3, 8, 11, 59, 59), DT(2012, 3, 10, 12, 0)) + test(DT(2012, 3, 8, 12, 0), DT(2012, 3, 11, 12, 0)) + test(DT(2012, 3, 8, 13, 0), DT(2012, 3, 11, 12, 0)) + test(DT(2012, 3, 10, 11, 59, 59), DT(2012, 3, 12, 12, 0)) + test(DT(2012, 3, 10, 12, 0), DT(2012, 3, 13, 12, 0)) + test(DT(2012, 3, 10, 13, 0), DT(2012, 3, 13, 12, 0)) + + test(DT(2012, 11, 1, 11, 59, 59), DT(2012, 11, 3, 12, 0)) + test(DT(2012, 11, 1, 12, 0), DT(2012, 11, 4, 12, 0)) + test(DT(2012, 11, 1, 13, 0), DT(2012, 11, 4, 12, 0)) + test(DT(2012, 11, 3, 11, 59, 59), DT(2012, 11, 5, 12, 0)) + test(DT(2012, 11, 3, 12, 0), DT(2012, 11, 6, 12, 0)) + test(DT(2012, 11, 3, 13, 0), DT(2012, 11, 6, 12, 0)) + + fh.close() + + # Run with US-style DST rules: DST begins 2 a.m. on second Sunday in + # March (M3.2.0) and ends 2 a.m. on first Sunday in November (M11.1.0). + @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') + def test_compute_rollover_W6_local_interval(self): + # DST begins at 2012-3-11T02:00:00 and ends at 2012-11-4T02:00:00. 
+ DT = datetime.datetime + def test(current, expected): + actual = fh.computeRollover(current.timestamp()) + diff = actual - expected.timestamp() + if diff: + self.assertEqual(diff, 0, datetime.timedelta(seconds=diff)) + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False, interval=3) + + test(DT(2012, 2, 19, 23, 59, 59), DT(2012, 3, 5, 0, 0)) + test(DT(2012, 2, 20, 0, 0), DT(2012, 3, 12, 0, 0)) + test(DT(2012, 2, 20, 1, 0), DT(2012, 3, 12, 0, 0)) + test(DT(2012, 3, 4, 23, 59, 59), DT(2012, 3, 19, 0, 0)) + test(DT(2012, 3, 5, 0, 0), DT(2012, 3, 26, 0, 0)) + test(DT(2012, 3, 5, 1, 0), DT(2012, 3, 26, 0, 0)) + + test(DT(2012, 10, 14, 23, 59, 59), DT(2012, 10, 29, 0, 0)) + test(DT(2012, 10, 15, 0, 0), DT(2012, 11, 5, 0, 0)) + test(DT(2012, 10, 15, 1, 0), DT(2012, 11, 5, 0, 0)) + test(DT(2012, 10, 28, 23, 59, 59), DT(2012, 11, 12, 0, 0)) + test(DT(2012, 10, 29, 0, 0), DT(2012, 11, 19, 0, 0)) + test(DT(2012, 10, 29, 1, 0), DT(2012, 11, 19, 0, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False, interval=3, + atTime=datetime.time(0, 0, 0)) + + test(DT(2012, 2, 25, 23, 59, 59), DT(2012, 3, 11, 0, 0)) + test(DT(2012, 2, 26, 0, 0), DT(2012, 3, 18, 0, 0)) + test(DT(2012, 2, 26, 1, 0), DT(2012, 3, 18, 0, 0)) + test(DT(2012, 3, 10, 23, 59, 59), DT(2012, 3, 25, 0, 0)) + test(DT(2012, 3, 11, 0, 0), DT(2012, 4, 1, 0, 0)) + test(DT(2012, 3, 11, 1, 0), DT(2012, 4, 1, 0, 0)) + + test(DT(2012, 10, 20, 23, 59, 59), DT(2012, 11, 4, 0, 0)) + test(DT(2012, 10, 21, 0, 0), DT(2012, 11, 11, 0, 0)) + test(DT(2012, 10, 21, 1, 0), DT(2012, 11, 11, 0, 0)) + test(DT(2012, 11, 3, 23, 59, 59), DT(2012, 11, 18, 0, 0)) + test(DT(2012, 11, 4, 0, 0), DT(2012, 11, 25, 0, 0)) + test(DT(2012, 11, 4, 1, 0), DT(2012, 11, 25, 0, 0)) + + fh.close() + + fh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when='W6', utc=False, interval=3, + atTime=datetime.time(12, 0, 0)) + + test(DT(2012, 2, 18, 11, 59, 59), DT(2012, 3, 4, 12, 0)) + test(DT(2012, 2, 19, 12, 0), DT(2012, 3, 11, 12, 0)) + test(DT(2012, 2, 19, 13, 0), DT(2012, 3, 11, 12, 0)) + test(DT(2012, 3, 4, 11, 59, 59), DT(2012, 3, 18, 12, 0)) + test(DT(2012, 3, 4, 12, 0), DT(2012, 3, 25, 12, 0)) + test(DT(2012, 3, 4, 13, 0), DT(2012, 3, 25, 12, 0)) + + test(DT(2012, 10, 14, 11, 59, 59), DT(2012, 10, 28, 12, 0)) + test(DT(2012, 10, 14, 12, 0), DT(2012, 11, 4, 12, 0)) + test(DT(2012, 10, 14, 13, 0), DT(2012, 11, 4, 12, 0)) + test(DT(2012, 10, 28, 11, 59, 59), DT(2012, 11, 11, 12, 0)) + test(DT(2012, 10, 28, 12, 0), DT(2012, 11, 18, 12, 0)) + test(DT(2012, 10, 28, 13, 0), DT(2012, 11, 18, 12, 0)) + + fh.close() def secs(**kw): @@ -5688,40 +6281,49 @@ # current time (epoch start) is a Thursday, W0 means Monday ('W0', secs(days=4, hours=24)), ): - def test_compute_rollover(self, when=when, exp=exp): - rh = logging.handlers.TimedRotatingFileHandler( - self.fn, encoding="utf-8", when=when, interval=1, backupCount=0, utc=True) - currentTime = 0.0 - actual = rh.computeRollover(currentTime) - if exp != actual: - # Failures occur on some systems for MIDNIGHT and W0. 
- # Print detailed calculation for MIDNIGHT so we can try to see - # what's going on - if when == 'MIDNIGHT': - try: - if rh.utc: - t = time.gmtime(currentTime) - else: - t = time.localtime(currentTime) - currentHour = t[3] - currentMinute = t[4] - currentSecond = t[5] - # r is the number of seconds left between now and midnight - r = logging.handlers._MIDNIGHT - ((currentHour * 60 + - currentMinute) * 60 + - currentSecond) - result = currentTime + r - print('t: %s (%s)' % (t, rh.utc), file=sys.stderr) - print('currentHour: %s' % currentHour, file=sys.stderr) - print('currentMinute: %s' % currentMinute, file=sys.stderr) - print('currentSecond: %s' % currentSecond, file=sys.stderr) - print('r: %s' % r, file=sys.stderr) - print('result: %s' % result, file=sys.stderr) - except Exception as e: - print('exception in diagnostic code: %s' % e, file=sys.stderr) - self.assertEqual(exp, actual) - rh.close() - setattr(TimedRotatingFileHandlerTest, "test_compute_rollover_%s" % when, test_compute_rollover) + for interval in 1, 3: + def test_compute_rollover(self, when=when, interval=interval, exp=exp): + rh = logging.handlers.TimedRotatingFileHandler( + self.fn, encoding="utf-8", when=when, interval=interval, backupCount=0, utc=True) + currentTime = 0.0 + actual = rh.computeRollover(currentTime) + if when.startswith('W'): + exp += secs(days=7*(interval-1)) + else: + exp *= interval + if exp != actual: + # Failures occur on some systems for MIDNIGHT and W0. + # Print detailed calculation for MIDNIGHT so we can try to see + # what's going on + if when == 'MIDNIGHT': + try: + if rh.utc: + t = time.gmtime(currentTime) + else: + t = time.localtime(currentTime) + currentHour = t[3] + currentMinute = t[4] + currentSecond = t[5] + # r is the number of seconds left between now and midnight + r = logging.handlers._MIDNIGHT - ((currentHour * 60 + + currentMinute) * 60 + + currentSecond) + result = currentTime + r + print('t: %s (%s)' % (t, rh.utc), file=sys.stderr) + print('currentHour: %s' % currentHour, file=sys.stderr) + print('currentMinute: %s' % currentMinute, file=sys.stderr) + print('currentSecond: %s' % currentSecond, file=sys.stderr) + print('r: %s' % r, file=sys.stderr) + print('result: %s' % result, file=sys.stderr) + except Exception as e: + print('exception in diagnostic code: %s' % e, file=sys.stderr) + self.assertEqual(exp, actual) + rh.close() + name = "test_compute_rollover_%s" % when + if interval > 1: + name += "_interval" + test_compute_rollover.__name__ = name + setattr(TimedRotatingFileHandlerTest, name, test_compute_rollover) @unittest.skipUnless(win32evtlog, 'win32evtlog/win32evtlogutil/pywintypes required for this test.') diff -Nru python3.11-3.11.8/Lib/test/test_lzma.py python3.11-3.11.9/Lib/test/test_lzma.py --- python3.11-3.11.8/Lib/test/test_lzma.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_lzma.py 2024-04-02 08:25:04.000000000 +0000 @@ -2,7 +2,6 @@ import array from io import BytesIO, UnsupportedOperation, DEFAULT_BUFFER_SIZE import os -import pathlib import pickle import random import sys @@ -12,7 +11,7 @@ from test.support import _4G, bigmemtest from test.support.import_helper import import_module from test.support.os_helper import ( - TESTFN, unlink + TESTFN, unlink, FakePath ) lzma = import_module("lzma") @@ -544,7 +543,7 @@ pass def test_init_with_PathLike_filename(self): - filename = pathlib.Path(TESTFN) + filename = FakePath(TESTFN) with TempFile(filename, COMPRESSED_XZ): with LZMAFile(filename) as f: self.assertEqual(f.read(), INPUT) @@ -581,11 
+580,10 @@ self.addCleanup(unlink, TESTFN) for mode in ("x", "xb"): unlink(TESTFN) - with LZMAFile(TESTFN, mode): + with LZMAFile(TESTFN, mode) as f: pass with self.assertRaises(FileExistsError): - with LZMAFile(TESTFN, mode): - pass + LZMAFile(TESTFN, mode) def test_init_bad_mode(self): with self.assertRaises(ValueError): @@ -866,17 +864,59 @@ with LZMAFile(TESTFN) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") + self.assertIsInstance(f.fileno(), int) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) def test_read_from_file_with_bytes_filename(self): - try: - bytes_filename = TESTFN.encode("ascii") - except UnicodeEncodeError: - self.skipTest("Temporary file name needs to be ASCII") + bytes_filename = os.fsencode(TESTFN) with TempFile(TESTFN, COMPRESSED_XZ): with LZMAFile(bytes_filename) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") + def test_read_from_fileobj(self): + with TempFile(TESTFN, COMPRESSED_XZ): + with open(TESTFN, 'rb') as raw: + with LZMAFile(raw) as f: + self.assertEqual(f.read(), INPUT) + self.assertEqual(f.read(), b"") + self.assertEqual(f.fileno(), raw.fileno()) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + def test_read_from_fileobj_with_int_name(self): + with TempFile(TESTFN, COMPRESSED_XZ): + fd = os.open(TESTFN, os.O_RDONLY) + with open(fd, 'rb') as raw: + with LZMAFile(raw) as f: + self.assertEqual(f.read(), INPUT) + self.assertEqual(f.read(), b"") + self.assertEqual(f.fileno(), raw.fileno()) + self.assertIs(f.readable(), True) + self.assertIs(f.writable(), False) + self.assertIs(f.seekable(), True) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + def test_read_incomplete(self): with LZMAFile(BytesIO(COMPRESSED_XZ[:128])) as f: self.assertRaises(EOFError, f.read) @@ -1059,6 +1099,17 @@ try: with LZMAFile(TESTFN, "w") as f: f.write(INPUT) + self.assertIsInstance(f.fileno(), int) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), False) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + expected = lzma.compress(INPUT) with open(TESTFN, "rb") as f: self.assertEqual(f.read(), expected) @@ -1066,10 +1117,7 @@ unlink(TESTFN) def test_write_to_file_with_bytes_filename(self): - try: - bytes_filename = TESTFN.encode("ascii") - except UnicodeEncodeError: - self.skipTest("Temporary file name needs to be ASCII") + bytes_filename = os.fsencode(TESTFN) try: with LZMAFile(bytes_filename, "w") as f: f.write(INPUT) @@ -1079,6 +1127,51 @@ finally: unlink(TESTFN) + def test_write_to_fileobj(self): + try: + with 
open(TESTFN, "wb") as raw: + with LZMAFile(raw, "w") as f: + f.write(INPUT) + self.assertEqual(f.fileno(), raw.fileno()) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), False) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + expected = lzma.compress(INPUT) + with open(TESTFN, "rb") as f: + self.assertEqual(f.read(), expected) + finally: + unlink(TESTFN) + + def test_write_to_fileobj_with_int_name(self): + try: + fd = os.open(TESTFN, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) + with open(fd, 'wb') as raw: + with LZMAFile(raw, "w") as f: + f.write(INPUT) + self.assertEqual(f.fileno(), raw.fileno()) + self.assertIs(f.readable(), False) + self.assertIs(f.writable(), True) + self.assertIs(f.seekable(), False) + self.assertIs(f.closed, False) + self.assertIs(f.closed, True) + self.assertRaises(ValueError, f.fileno) + self.assertRaises(ValueError, f.readable) + self.assertRaises(ValueError, f.writable) + self.assertRaises(ValueError, f.seekable) + + expected = lzma.compress(INPUT) + with open(TESTFN, "rb") as f: + self.assertEqual(f.read(), expected) + finally: + unlink(TESTFN) + def test_write_append_to_file(self): part1 = INPUT[:1024] part2 = INPUT[1024:1536] @@ -1284,7 +1377,7 @@ self.assertEqual(f.read(), INPUT * 2) def test_with_pathlike_filename(self): - filename = pathlib.Path(TESTFN) + filename = FakePath(TESTFN) with TempFile(filename): with lzma.open(filename, "wb") as f: f.write(INPUT) diff -Nru python3.11-3.11.8/Lib/test/test_mimetypes.py python3.11-3.11.9/Lib/test/test_mimetypes.py --- python3.11-3.11.8/Lib/test/test_mimetypes.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_mimetypes.py 2024-04-02 08:25:04.000000000 +0000 @@ -1,5 +1,6 @@ import io import mimetypes +import os import pathlib import sys import unittest.mock @@ -111,15 +112,40 @@ # compared to when interpreted as filename because of the semicolon. 
eq = self.assertEqual gzip_expected = ('application/x-tar', 'gzip') - eq(self.db.guess_type(";1.tar.gz"), gzip_expected) - eq(self.db.guess_type("?1.tar.gz"), gzip_expected) - eq(self.db.guess_type("#1.tar.gz"), gzip_expected) - eq(self.db.guess_type("#1#.tar.gz"), gzip_expected) - eq(self.db.guess_type(";1#.tar.gz"), gzip_expected) - eq(self.db.guess_type(";&1=123;?.tar.gz"), gzip_expected) - eq(self.db.guess_type("?k1=v1&k2=v2.tar.gz"), gzip_expected) + for name in ( + ';1.tar.gz', + '?1.tar.gz', + '#1.tar.gz', + '#1#.tar.gz', + ';1#.tar.gz', + ';&1=123;?.tar.gz', + '?k1=v1&k2=v2.tar.gz', + ): + for prefix in ('', '/', '\\', + 'c:', 'c:/', 'c:\\', 'c:/d/', 'c:\\d\\', + '//share/server/', '\\\\share\\server\\'): + path = prefix + name + with self.subTest(path=path): + eq(self.db.guess_type(path), gzip_expected) + expected = (None, None) if os.name == 'nt' else gzip_expected + for prefix in ('//', '\\\\', '//share/', '\\\\share\\'): + path = prefix + name + with self.subTest(path=path): + eq(self.db.guess_type(path), expected) eq(self.db.guess_type(r" \"\`;b&b&c |.tar.gz"), gzip_expected) + def test_url(self): + result = self.db.guess_type('http://host.html') + msg = 'URL only has a host name, not a file' + self.assertSequenceEqual(result, (None, None), msg) + result = self.db.guess_type('http://example.com/host.html') + msg = 'Should be text/html' + self.assertSequenceEqual(result, ('text/html', None), msg) + result = self.db.guess_type('http://example.com/host.html#x.tar') + self.assertSequenceEqual(result, ('text/html', None)) + result = self.db.guess_type('http://example.com/host.html?q=x.tar') + self.assertSequenceEqual(result, ('text/html', None)) + def test_guess_all_types(self): # First try strict. Use a set here for testing the results because if # test_urllib2 is run before test_mimetypes, global state is modified diff -Nru python3.11-3.11.8/Lib/test/test_named_expressions.py python3.11-3.11.9/Lib/test/test_named_expressions.py --- python3.11-3.11.8/Lib/test/test_named_expressions.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_named_expressions.py 2024-04-02 08:25:04.000000000 +0000 @@ -222,6 +222,16 @@ with self.assertRaisesRegex(SyntaxError, msg): exec(f"lambda: {code}", {}) # Function scope + def test_named_expression_invalid_mangled_class_variables(self): + code = """class Foo: + def bar(self): + [[(__x:=2) for _ in range(2)] for __x in range(2)] + """ + + with self.assertRaisesRegex(SyntaxError, + "assignment expression cannot rebind comprehension iteration variable '__x'"): + exec(code, {}, {}) + class NamedExpressionAssignmentTest(unittest.TestCase): @@ -598,6 +608,18 @@ for idx, elem in enumerate(genexp): self.assertEqual(elem, b[idx] + a) + def test_named_expression_scope_mangled_names(self): + class Foo: + def f(self_): + global __x1 + __x1 = 0 + [_Foo__x1 := 1 for a in [2]] + self.assertEqual(__x1, 1) + [__x1 := 2 for a in [3]] + self.assertEqual(__x1, 2) + + Foo().f() + self.assertEqual(_Foo__x1, 2) if __name__ == "__main__": unittest.main() diff -Nru python3.11-3.11.8/Lib/test/test_operator.py python3.11-3.11.9/Lib/test/test_operator.py --- python3.11-3.11.8/Lib/test/test_operator.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_operator.py 2024-04-02 08:25:04.000000000 +0000 @@ -1,6 +1,8 @@ import unittest import pickle import sys +from decimal import Decimal +from fractions import Fraction from test import support from test.support import import_helper @@ -508,6 +510,44 @@ self.assertEqual(operator.ixor (c, 5), 
"ixor") self.assertEqual(operator.iconcat (c, c), "iadd") + def test_iconcat_without_getitem(self): + operator = self.module + + msg = "'int' object can't be concatenated" + with self.assertRaisesRegex(TypeError, msg): + operator.iconcat(1, 0.5) + + def test_index(self): + operator = self.module + class X: + def __index__(self): + return 1 + + self.assertEqual(operator.index(X()), 1) + self.assertEqual(operator.index(0), 0) + self.assertEqual(operator.index(1), 1) + self.assertEqual(operator.index(2), 2) + with self.assertRaises((AttributeError, TypeError)): + operator.index(1.5) + with self.assertRaises((AttributeError, TypeError)): + operator.index(Fraction(3, 7)) + with self.assertRaises((AttributeError, TypeError)): + operator.index(Decimal(1)) + with self.assertRaises((AttributeError, TypeError)): + operator.index(None) + + def test_not_(self): + operator = self.module + class C: + def __bool__(self): + raise SyntaxError + self.assertRaises(TypeError, operator.not_) + self.assertRaises(SyntaxError, operator.not_, C()) + self.assertFalse(operator.not_(5)) + self.assertFalse(operator.not_([0])) + self.assertTrue(operator.not_(0)) + self.assertTrue(operator.not_([])) + def test_length_hint(self): operator = self.module class X(object): @@ -533,6 +573,13 @@ with self.assertRaises(LookupError): operator.length_hint(X(LookupError)) + class Y: pass + + msg = "'str' object cannot be interpreted as an integer" + with self.assertRaisesRegex(TypeError, msg): + operator.length_hint(X(2), "abc") + self.assertEqual(operator.length_hint(Y(), 10), 10) + def test_call(self): operator = self.module diff -Nru python3.11-3.11.8/Lib/test/test_os.py python3.11-3.11.9/Lib/test/test_os.py --- python3.11-3.11.8/Lib/test/test_os.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_os.py 2024-04-02 08:25:04.000000000 +0000 @@ -1278,6 +1278,7 @@ class WalkTests(unittest.TestCase): """Tests for os.walk().""" + is_fwalk = False # Wrapper to hide minor differences between os.walk and os.fwalk # to tests both functions with the same code base @@ -1312,14 +1313,14 @@ self.sub11_path = join(self.sub1_path, "SUB11") sub2_path = join(self.walk_path, "SUB2") sub21_path = join(sub2_path, "SUB21") - tmp1_path = join(self.walk_path, "tmp1") + self.tmp1_path = join(self.walk_path, "tmp1") tmp2_path = join(self.sub1_path, "tmp2") tmp3_path = join(sub2_path, "tmp3") tmp5_path = join(sub21_path, "tmp3") self.link_path = join(sub2_path, "link") t2_path = join(os_helper.TESTFN, "TEST2") tmp4_path = join(os_helper.TESTFN, "TEST2", "tmp4") - broken_link_path = join(sub2_path, "broken_link") + self.broken_link_path = join(sub2_path, "broken_link") broken_link2_path = join(sub2_path, "broken_link2") broken_link3_path = join(sub2_path, "broken_link3") @@ -1329,13 +1330,13 @@ os.makedirs(sub21_path) os.makedirs(t2_path) - for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path, tmp5_path: + for path in self.tmp1_path, tmp2_path, tmp3_path, tmp4_path, tmp5_path: with open(path, "x", encoding='utf-8') as f: f.write("I'm " + path + " and proud of it. 
Blame test_os.\n") if os_helper.can_symlink(): os.symlink(os.path.abspath(t2_path), self.link_path) - os.symlink('broken', broken_link_path, True) + os.symlink('broken', self.broken_link_path, True) os.symlink(join('tmp3', 'broken'), broken_link2_path, True) os.symlink(join('SUB21', 'tmp5'), broken_link3_path, True) self.sub2_tree = (sub2_path, ["SUB21", "link"], @@ -1431,6 +1432,11 @@ else: self.fail("Didn't follow symlink with followlinks=True") + walk_it = self.walk(self.broken_link_path, follow_symlinks=True) + if self.is_fwalk: + self.assertRaises(FileNotFoundError, next, walk_it) + self.assertRaises(StopIteration, next, walk_it) + def test_walk_bad_dir(self): # Walk top-down. errors = [] @@ -1452,6 +1458,73 @@ finally: os.rename(path1new, path1) + def test_walk_bad_dir2(self): + walk_it = self.walk('nonexisting') + if self.is_fwalk: + self.assertRaises(FileNotFoundError, next, walk_it) + self.assertRaises(StopIteration, next, walk_it) + + walk_it = self.walk('nonexisting', follow_symlinks=True) + if self.is_fwalk: + self.assertRaises(FileNotFoundError, next, walk_it) + self.assertRaises(StopIteration, next, walk_it) + + walk_it = self.walk(self.tmp1_path) + self.assertRaises(StopIteration, next, walk_it) + + walk_it = self.walk(self.tmp1_path, follow_symlinks=True) + if self.is_fwalk: + self.assertRaises(NotADirectoryError, next, walk_it) + self.assertRaises(StopIteration, next, walk_it) + + @unittest.skipUnless(hasattr(os, "mkfifo"), 'requires os.mkfifo()') + @unittest.skipIf(sys.platform == "vxworks", + "fifo requires special path on VxWorks") + def test_walk_named_pipe(self): + path = os_helper.TESTFN + '-pipe' + os.mkfifo(path) + self.addCleanup(os.unlink, path) + + walk_it = self.walk(path) + self.assertRaises(StopIteration, next, walk_it) + + walk_it = self.walk(path, follow_symlinks=True) + if self.is_fwalk: + self.assertRaises(NotADirectoryError, next, walk_it) + self.assertRaises(StopIteration, next, walk_it) + + @unittest.skipUnless(hasattr(os, "mkfifo"), 'requires os.mkfifo()') + @unittest.skipIf(sys.platform == "vxworks", + "fifo requires special path on VxWorks") + def test_walk_named_pipe2(self): + path = os_helper.TESTFN + '-dir' + os.mkdir(path) + self.addCleanup(shutil.rmtree, path) + os.mkfifo(os.path.join(path, 'mypipe')) + + errors = [] + walk_it = self.walk(path, onerror=errors.append) + next(walk_it) + self.assertRaises(StopIteration, next, walk_it) + self.assertEqual(errors, []) + + errors = [] + walk_it = self.walk(path, onerror=errors.append) + root, dirs, files = next(walk_it) + self.assertEqual(root, path) + self.assertEqual(dirs, []) + self.assertEqual(files, ['mypipe']) + dirs.extend(files) + files.clear() + if self.is_fwalk: + self.assertRaises(NotADirectoryError, next, walk_it) + self.assertRaises(StopIteration, next, walk_it) + if self.is_fwalk: + self.assertEqual(errors, []) + else: + self.assertEqual(len(errors), 1, errors) + self.assertIsInstance(errors[0], NotADirectoryError) + def test_walk_many_open_files(self): depth = 30 base = os.path.join(os_helper.TESTFN, 'deep') @@ -1477,6 +1550,7 @@ @unittest.skipUnless(hasattr(os, 'fwalk'), "Test needs os.fwalk()") class FwalkTests(WalkTests): """Tests for os.fwalk().""" + is_fwalk = True def walk(self, top, **kwargs): for root, dirs, files, root_fd in self.fwalk(top, **kwargs): @@ -3320,22 +3394,22 @@ """Tests for os.getpriority() and os.setpriority().""" def test_set_get_priority(self): - base = os.getpriority(os.PRIO_PROCESS, os.getpid()) - os.setpriority(os.PRIO_PROCESS, os.getpid(), base + 1) - try: 
- new_prio = os.getpriority(os.PRIO_PROCESS, os.getpid()) - if base >= 19 and new_prio <= 19: - raise unittest.SkipTest("unable to reliably test setpriority " - "at current nice level of %s" % base) - else: - self.assertEqual(new_prio, base + 1) - finally: - try: - os.setpriority(os.PRIO_PROCESS, os.getpid(), base) - except OSError as err: - if err.errno != errno.EACCES: - raise + code = f"""if 1: + import os + os.setpriority(os.PRIO_PROCESS, os.getpid(), {base} + 1) + print(os.getpriority(os.PRIO_PROCESS, os.getpid())) + """ + + # Subprocess inherits the current process' priority. + _, out, _ = assert_python_ok("-c", code) + new_prio = int(out) + # nice value cap is 19 for linux and 20 for FreeBSD + if base >= 19 and new_prio <= base: + raise unittest.SkipTest("unable to reliably test setpriority " + "at current nice level of %s" % base) + else: + self.assertEqual(new_prio, base + 1) @unittest.skipUnless(hasattr(os, 'sendfile'), "test needs os.sendfile()") diff -Nru python3.11-3.11.8/Lib/test/test_pathlib.py python3.11-3.11.9/Lib/test/test_pathlib.py --- python3.11-3.11.8/Lib/test/test_pathlib.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_pathlib.py 2024-04-02 08:25:04.000000000 +0000 @@ -2430,15 +2430,15 @@ self.assertIs((P / 'fileA\x00').is_char_device(), False) def test_is_char_device_true(self): - # Under Unix, /dev/null should generally be a char device. - P = self.cls('/dev/null') + # os.devnull should generally be a char device. + P = self.cls(os.devnull) if not P.exists(): - self.skipTest("/dev/null required") + self.skipTest("null device required") self.assertTrue(P.is_char_device()) self.assertFalse(P.is_block_device()) self.assertFalse(P.is_file()) - self.assertIs(self.cls('/dev/null\udfff').is_char_device(), False) - self.assertIs(self.cls('/dev/null\x00').is_char_device(), False) + self.assertIs(self.cls(f'{os.devnull}\udfff').is_char_device(), False) + self.assertIs(self.cls(f'{os.devnull}\x00').is_char_device(), False) def test_pickling_common(self): p = self.cls(BASE, 'fileA') diff -Nru python3.11-3.11.8/Lib/test/test_pdb.py python3.11-3.11.9/Lib/test/test_pdb.py --- python3.11-3.11.8/Lib/test/test_pdb.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_pdb.py 2024-04-02 08:25:04.000000000 +0000 @@ -1785,13 +1785,30 @@ ) return stdout, stderr - def run_pdb_script(self, script, commands, expected_returncode=0): + def run_pdb_script(self, script, commands, + expected_returncode=0, + pdbrc=None, + remove_home=False): """Run 'script' lines with pdb and the pdb 'commands'.""" filename = 'main.py' with open(filename, 'w') as f: f.write(textwrap.dedent(script)) + + if pdbrc is not None: + with open('.pdbrc', 'w') as f: + f.write(textwrap.dedent(pdbrc)) + self.addCleanup(os_helper.unlink, '.pdbrc') self.addCleanup(os_helper.unlink, filename) - return self._run_pdb([filename], commands, expected_returncode) + + homesave = None + if remove_home: + homesave = os.environ.pop('HOME', None) + try: + stdout, stderr = self._run_pdb([filename], commands, expected_returncode) + finally: + if homesave is not None: + os.environ['HOME'] = homesave + return stdout, stderr def run_pdb_module(self, script, commands): """Runs the script code as part of a module""" @@ -1862,6 +1879,18 @@ ('bœr', 1), ) + def test_spec(self): + # Test that __main__.__spec__ is set to None when running a script + script = """ + import __main__ + print(__main__.__spec__) + """ + + commands = "continue" + + stdout, _ = self.run_pdb_script(script, commands) + 
self.assertIn('None', stdout) + def test_issue7964(self): # open the file as binary so we can force \r\n newline with open(os_helper.TESTFN, 'wb') as f: @@ -2019,37 +2048,99 @@ self.assertRegex(res, "Restarting .* with arguments:\na b c") self.assertRegex(res, "Restarting .* with arguments:\nd e f") - def test_readrc_kwarg(self): + def test_pdbrc_basic(self): script = textwrap.dedent(""" - import pdb; pdb.Pdb(readrc=False).set_trace() + a = 1 + b = 2 + """) - print('hello') + pdbrc = textwrap.dedent(""" + # Comments should be fine + n + p f"{a+8=}" """) - save_home = os.environ.pop('HOME', None) - try: - with os_helper.temp_cwd(): - with open('.pdbrc', 'w') as f: - f.write("invalid\n") - - with open('main.py', 'w') as f: - f.write(script) - - cmd = [sys.executable, 'main.py'] - proc = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stdin=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - with proc: - stdout, stderr = proc.communicate(b'q\n') - self.assertNotIn(b"NameError: name 'invalid' is not defined", - stdout) + stdout, stderr = self.run_pdb_script(script, 'q\n', pdbrc=pdbrc, remove_home=True) + self.assertNotIn("SyntaxError", stdout) + self.assertIn("a+8=9", stdout) - finally: - if save_home is not None: - os.environ['HOME'] = save_home + def test_pdbrc_empty_line(self): + """Test that empty lines in .pdbrc are ignored.""" + + script = textwrap.dedent(""" + a = 1 + b = 2 + c = 3 + """) + + pdbrc = textwrap.dedent(""" + n + + """) + + stdout, stderr = self.run_pdb_script(script, 'q\n', pdbrc=pdbrc, remove_home=True) + self.assertIn("b = 2", stdout) + self.assertNotIn("c = 3", stdout) + + def test_pdbrc_alias(self): + script = textwrap.dedent(""" + class A: + def __init__(self): + self.attr = 1 + a = A() + b = 2 + """) + + pdbrc = textwrap.dedent(""" + alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = {%1.__dict__[k]}") + until 6 + pi a + """) + + stdout, stderr = self.run_pdb_script(script, 'q\n', pdbrc=pdbrc, remove_home=True) + self.assertIn("a.attr = 1", stdout) + + def test_pdbrc_semicolon(self): + script = textwrap.dedent(""" + class A: + def __init__(self): + self.attr = 1 + a = A() + b = 2 + """) + + pdbrc = textwrap.dedent(""" + b 5;;c;;n + """) + + stdout, stderr = self.run_pdb_script(script, 'q\n', pdbrc=pdbrc, remove_home=True) + self.assertIn("-> b = 2", stdout) + + def test_pdbrc_commands(self): + script = textwrap.dedent(""" + class A: + def __init__(self): + self.attr = 1 + a = A() + b = 2 + """) + + pdbrc = textwrap.dedent(""" + b 6 + commands 1 ;; p a;; end + c + """) + + stdout, stderr = self.run_pdb_script(script, 'q\n', pdbrc=pdbrc, remove_home=True) + self.assertIn("<__main__.A object at", stdout) + + def test_readrc_kwarg(self): + script = textwrap.dedent(""" + print('hello') + """) + + stdout, stderr = self.run_pdb_script(script, 'q\n', pdbrc='invalid', remove_home=True) + self.assertIn("NameError: name 'invalid' is not defined", stdout) def test_readrc_homedir(self): save_home = os.environ.pop("HOME", None) @@ -2064,40 +2155,6 @@ if save_home is not None: os.environ["HOME"] = save_home - def test_read_pdbrc_with_ascii_encoding(self): - script = textwrap.dedent(""" - import pdb; pdb.Pdb().set_trace() - print('hello') - """) - save_home = os.environ.pop('HOME', None) - try: - with os_helper.temp_cwd(): - with open('.pdbrc', 'w', encoding='utf-8') as f: - f.write("Fran\u00E7ais") - - with open('main.py', 'w', encoding='utf-8') as f: - f.write(script) - - cmd = [sys.executable, 'main.py'] - env = {'PYTHONIOENCODING': 'ascii'} - if sys.platform == 'win32': - 
env['PYTHONLEGACYWINDOWSSTDIO'] = 'non-empty-string' - proc = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stdin=subprocess.PIPE, - stderr=subprocess.PIPE, - env={**os.environ, **env} - ) - with proc: - stdout, stderr = proc.communicate(b'c\n') - self.assertIn(b"UnicodeEncodeError: \'ascii\' codec can\'t encode character " - b"\'\\xe7\' in position 21: ordinal not in range(128)", stderr) - - finally: - if save_home is not None: - os.environ['HOME'] = save_home - def test_header(self): stdout = StringIO() header = 'Nobody expects... blah, blah, blah' diff -Nru python3.11-3.11.8/Lib/test/test_platform.py python3.11-3.11.9/Lib/test/test_platform.py --- python3.11-3.11.8/Lib/test/test_platform.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_platform.py 2024-04-02 08:25:04.000000000 +0000 @@ -317,8 +317,36 @@ if sys.platform == 'java': self.assertTrue(all(res)) + @unittest.skipUnless(support.MS_WINDOWS, 'This test only makes sense on Windows') def test_win32_ver(self): - res = platform.win32_ver() + release1, version1, csd1, ptype1 = 'a', 'b', 'c', 'd' + res = platform.win32_ver(release1, version1, csd1, ptype1) + self.assertEqual(len(res), 4) + release, version, csd, ptype = res + if release: + # Currently, release names always come from internal dicts, + # but this could change over time. For now, we just check that + # release is something different from what we have passed. + self.assertNotEqual(release, release1) + if version: + # It is rather hard to test explicit version without + # going deep into the details. + self.assertIn('.', version) + for v in version.split('.'): + int(v) # should not fail + if csd: + self.assertTrue(csd.startswith('SP'), msg=csd) + if ptype: + if os.cpu_count() > 1: + self.assertIn('Multiprocessor', ptype) + else: + self.assertIn('Uniprocessor', ptype) + + @unittest.skipIf(support.MS_WINDOWS, 'This test only makes sense on non Windows') + def test_win32_ver_on_non_windows(self): + release, version, csd, ptype = 'a', '1.0', 'c', 'd' + res = platform.win32_ver(release, version, csd, ptype) + self.assertSequenceEqual(res, (release, version, csd, ptype), seq_type=tuple) def test_mac_ver(self): res = platform.mac_ver() diff -Nru python3.11-3.11.8/Lib/test/test_posix.py python3.11-3.11.9/Lib/test/test_posix.py --- python3.11-3.11.8/Lib/test/test_posix.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_posix.py 2024-04-02 08:25:04.000000000 +0000 @@ -1327,12 +1327,21 @@ def test_sched_setaffinity(self): mask = posix.sched_getaffinity(0) self.addCleanup(posix.sched_setaffinity, 0, list(mask)) + if len(mask) > 1: # Empty masks are forbidden mask.pop() posix.sched_setaffinity(0, mask) self.assertEqual(posix.sched_getaffinity(0), mask) - self.assertRaises(OSError, posix.sched_setaffinity, 0, []) + + try: + posix.sched_setaffinity(0, []) + # gh-117061: On RHEL9, sched_setaffinity(0, []) does not fail + except OSError: + # sched_setaffinity() manual page documents EINVAL error + # when the mask is empty. 
+ pass + self.assertRaises(ValueError, posix.sched_setaffinity, 0, [-10]) self.assertRaises(ValueError, posix.sched_setaffinity, 0, map(int, "0X")) self.assertRaises(OverflowError, posix.sched_setaffinity, 0, [1<<128]) diff -Nru python3.11-3.11.8/Lib/test/test_property.py python3.11-3.11.9/Lib/test/test_property.py --- python3.11-3.11.8/Lib/test/test_property.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_property.py 2024-04-02 08:25:04.000000000 +0000 @@ -183,6 +183,24 @@ fake_prop.__init__('fget', 'fset', 'fdel', 'doc') self.assertAlmostEqual(gettotalrefcount() - refs_before, 0, delta=10) + @support.refcount_test + def test_gh_115618(self): + # Py_XDECREF() was improperly called for None argument + # in property methods. + gettotalrefcount = support.get_attribute(sys, 'gettotalrefcount') + prop = property() + refs_before = gettotalrefcount() + for i in range(100): + prop = prop.getter(None) + self.assertIsNone(prop.fget) + for i in range(100): + prop = prop.setter(None) + self.assertIsNone(prop.fset) + for i in range(100): + prop = prop.deleter(None) + self.assertIsNone(prop.fdel) + self.assertAlmostEqual(gettotalrefcount() - refs_before, 0, delta=10) + @unittest.skipIf(sys.flags.optimize >= 2, "Docstrings are omitted with -O2 and above") def test_class_property(self): @@ -242,6 +260,7 @@ class PropertySubclassTests(unittest.TestCase): + @support.requires_docstrings def test_slots_docstring_copy_exception(self): try: class Foo(object): diff -Nru python3.11-3.11.8/Lib/test/test_py_compile.py python3.11-3.11.9/Lib/test/test_py_compile.py --- python3.11-3.11.8/Lib/test/test_py_compile.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_py_compile.py 2024-04-02 08:25:04.000000000 +0000 @@ -227,7 +227,8 @@ def setUp(self): self.directory = tempfile.mkdtemp() self.source_path = os.path.join(self.directory, '_test.py') - self.cache_path = importlib.util.cache_from_source(self.source_path) + self.cache_path = importlib.util.cache_from_source(self.source_path, + optimization='' if __debug__ else 1) with open(self.source_path, 'w') as file: file.write('x = 123\n') @@ -249,6 +250,7 @@ return script_helper.assert_python_failure('-m', 'py_compile', *args) def test_stdin(self): + self.assertFalse(os.path.exists(self.cache_path)) result = self.pycompilecmd('-', input=self.source_path) self.assertEqual(result.returncode, 0) self.assertEqual(result.stdout, b'') diff -Nru python3.11-3.11.8/Lib/test/test_pydoc/__init__.py python3.11-3.11.9/Lib/test/test_pydoc/__init__.py --- python3.11-3.11.8/Lib/test/test_pydoc/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_pydoc/__init__.py 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,6 @@ +import os +from test import support + + +def load_tests(*args): + return support.load_package_tests(os.path.dirname(__file__), *args) diff -Nru python3.11-3.11.8/Lib/test/test_pydoc/pydoc_mod.py python3.11-3.11.9/Lib/test/test_pydoc/pydoc_mod.py --- python3.11-3.11.8/Lib/test/test_pydoc/pydoc_mod.py 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_pydoc/pydoc_mod.py 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,51 @@ +"""This is a test module for test_pydoc""" + +from __future__ import print_function + +import types +import typing + +__author__ = "Benjamin Peterson" +__credits__ = "Nobody" +__version__ = "1.2.3.4" +__xyz__ = "X, Y and Z" + +class A: + """Hello and goodbye""" + def __init__(): + """Wow, I have no function!""" + pass + +class B(object): + NO_MEANING: str = 
"eggs" + pass + +class C(object): + def say_no(self): + return "no" + def get_answer(self): + """ Return say_no() """ + return self.say_no() + def is_it_true(self): + """ Return self.get_answer() """ + return self.get_answer() + def __class_getitem__(self, item): + return types.GenericAlias(self, item) + +def doc_func(): + """ + This function solves all of the world's problems: + hunger + lack of Python + war + """ + +def nodoc_func(): + pass + + +list_alias1 = typing.List[int] +list_alias2 = list[int] +c_alias = C[int] +type_union1 = typing.Union[int, str] +type_union2 = int | str diff -Nru python3.11-3.11.8/Lib/test/test_pydoc/pydocfodder.py python3.11-3.11.9/Lib/test/test_pydoc/pydocfodder.py --- python3.11-3.11.8/Lib/test/test_pydoc/pydocfodder.py 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_pydoc/pydocfodder.py 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,184 @@ +"""Something just to look at via pydoc.""" + +import types + +def global_func(x, y): + """Module global function""" + +def global_func2(x, y): + """Module global function 2""" + +class A: + "A class." + + def A_method(self): + "Method defined in A." + def AB_method(self): + "Method defined in A and B." + def AC_method(self): + "Method defined in A and C." + def AD_method(self): + "Method defined in A and D." + def ABC_method(self): + "Method defined in A, B and C." + def ABD_method(self): + "Method defined in A, B and D." + def ACD_method(self): + "Method defined in A, C and D." + def ABCD_method(self): + "Method defined in A, B, C and D." + + def A_classmethod(cls, x): + "A class method defined in A." + A_classmethod = classmethod(A_classmethod) + + def A_staticmethod(x, y): + "A static method defined in A." + A_staticmethod = staticmethod(A_staticmethod) + + def _getx(self): + "A property getter function." + def _setx(self, value): + "A property setter function." + def _delx(self): + "A property deleter function." + A_property = property(fdel=_delx, fget=_getx, fset=_setx, + doc="A sample property defined in A.") + + A_int_alias = int + +class B(A): + "A class, derived from A." + + def AB_method(self): + "Method defined in A and B." + def ABC_method(self): + "Method defined in A, B and C." + def ABD_method(self): + "Method defined in A, B and D." + def ABCD_method(self): + "Method defined in A, B, C and D." + def B_method(self): + "Method defined in B." + def BC_method(self): + "Method defined in B and C." + def BD_method(self): + "Method defined in B and D." + def BCD_method(self): + "Method defined in B, C and D." + + @classmethod + def B_classmethod(cls, x): + "A class method defined in B." + + global_func = global_func # same name + global_func_alias = global_func + global_func2_alias = global_func2 + B_classmethod_alias = B_classmethod + A_classmethod_ref = A.A_classmethod + A_staticmethod = A.A_staticmethod # same name + A_staticmethod_alias = A.A_staticmethod + A_method_ref = A().A_method + A_method_alias = A.A_method + B_method_alias = B_method + __repr__ = object.__repr__ # same name + object_repr = object.__repr__ + get = {}.get # same name + dict_get = {}.get + +B.B_classmethod_ref = B.B_classmethod + + +class C(A): + "A class, derived from A." + + def AC_method(self): + "Method defined in A and C." + def ABC_method(self): + "Method defined in A, B and C." + def ACD_method(self): + "Method defined in A, C and D." + def ABCD_method(self): + "Method defined in A, B, C and D." + def BC_method(self): + "Method defined in B and C." + def BCD_method(self): + "Method defined in B, C and D." 
+ def C_method(self): + "Method defined in C." + def CD_method(self): + "Method defined in C and D." + +class D(B, C): + """A class, derived from B and C. + """ + + def AD_method(self): + "Method defined in A and D." + def ABD_method(self): + "Method defined in A, B and D." + def ACD_method(self): + "Method defined in A, C and D." + def ABCD_method(self): + "Method defined in A, B, C and D." + def BD_method(self): + "Method defined in B and D." + def BCD_method(self): + "Method defined in B, C and D." + def CD_method(self): + "Method defined in C and D." + def D_method(self): + "Method defined in D." + +class FunkyProperties(object): + """From SF bug 472347, by Roeland Rengelink. + + Property getters etc may not be vanilla functions or methods, + and this used to make GUI pydoc blow up. + """ + + def __init__(self): + self.desc = {'x':0} + + class get_desc: + def __init__(self, attr): + self.attr = attr + def __call__(self, inst): + print('Get called', self, inst) + return inst.desc[self.attr] + class set_desc: + def __init__(self, attr): + self.attr = attr + def __call__(self, inst, val): + print('Set called', self, inst, val) + inst.desc[self.attr] = val + class del_desc: + def __init__(self, attr): + self.attr = attr + def __call__(self, inst): + print('Del called', self, inst) + del inst.desc[self.attr] + + x = property(get_desc('x'), set_desc('x'), del_desc('x'), 'prop x') + + +submodule = types.ModuleType(__name__ + '.submodule', + """A submodule, which should appear in its parent's summary""") + +global_func_alias = global_func +A_classmethod = A.A_classmethod # same name +A_classmethod2 = A.A_classmethod +A_classmethod3 = B.A_classmethod +A_staticmethod = A.A_staticmethod # same name +A_staticmethod_alias = A.A_staticmethod +A_staticmethod_ref = A().A_staticmethod +A_staticmethod_ref2 = B().A_staticmethod +A_method = A().A_method # same name +A_method2 = A().A_method +A_method3 = B().A_method +B_method = B.B_method # same name +B_method2 = B.B_method +count = list.count # same name +list_count = list.count +get = {}.get # same name +dict_get = {}.get diff -Nru python3.11-3.11.8/Lib/test/test_pydoc/test_pydoc.py python3.11-3.11.9/Lib/test/test_pydoc/test_pydoc.py --- python3.11-3.11.8/Lib/test/test_pydoc/test_pydoc.py 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_pydoc/test_pydoc.py 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,1803 @@ +import os +import sys +import contextlib +import importlib.util +import inspect +import pydoc +import py_compile +import keyword +import _pickle +import pkgutil +import re +import stat +import tempfile +import test.support +import types +import typing +import unittest +import urllib.parse +import xml.etree +import xml.etree.ElementTree +import textwrap +from io import StringIO +from collections import namedtuple +from urllib.request import urlopen, urlcleanup +from test import support +from test.support import import_helper +from test.support import os_helper +from test.support.script_helper import (assert_python_ok, + assert_python_failure, spawn_python) +from test.support import threading_helper +from test.support import (reap_children, captured_output, captured_stdout, + captured_stderr, is_emscripten, is_wasi, + requires_docstrings, MISSING_C_DOCSTRINGS) +from test.support.os_helper import (TESTFN, rmtree, unlink) +from test.test_pydoc import pydoc_mod +from test.test_pydoc import pydocfodder + + +class nonascii: + 'Це не латиниця' + pass + +if test.support.HAVE_DOCSTRINGS: + expected_data_docstrings = ( + 'dictionary 
for instance variables', + 'list of weak references to the object', + ) * 2 +else: + expected_data_docstrings = ('', '', '', '') + +expected_text_pattern = """ +NAME + test.test_pydoc.pydoc_mod - This is a test module for test_pydoc +%s +CLASSES + builtins.object + A + B + C +\x20\x20\x20\x20 + class A(builtins.object) + | Hello and goodbye + |\x20\x20 + | Methods defined here: + |\x20\x20 + | __init__() + | Wow, I have no function! + |\x20\x20 + | ---------------------------------------------------------------------- + | Data descriptors defined here: + |\x20\x20 + | __dict__%s + |\x20\x20 + | __weakref__%s +\x20\x20\x20\x20 + class B(builtins.object) + | Data descriptors defined here: + |\x20\x20 + | __dict__%s + |\x20\x20 + | __weakref__%s + |\x20\x20 + | ---------------------------------------------------------------------- + | Data and other attributes defined here: + |\x20\x20 + | NO_MEANING = 'eggs' + |\x20\x20 + | __annotations__ = {'NO_MEANING': } +\x20\x20\x20\x20 + class C(builtins.object) + | Methods defined here: + |\x20\x20 + | get_answer(self) + | Return say_no() + |\x20\x20 + | is_it_true(self) + | Return self.get_answer() + |\x20\x20 + | say_no(self) + |\x20\x20 + | ---------------------------------------------------------------------- + | Class methods defined here: + |\x20\x20 + | __class_getitem__(item) + |\x20\x20 + | ---------------------------------------------------------------------- + | Data descriptors defined here: + |\x20\x20 + | __dict__ + | dictionary for instance variables + |\x20\x20 + | __weakref__ + | list of weak references to the object + +FUNCTIONS + doc_func() + This function solves all of the world's problems: + hunger + lack of Python + war +\x20\x20\x20\x20 + nodoc_func() + +DATA + __xyz__ = 'X, Y and Z' + c_alias = test.test_pydoc.pydoc_mod.C[int] + list_alias1 = typing.List[int] + list_alias2 = list[int] + type_union1 = typing.Union[int, str] + type_union2 = int | str + +VERSION + 1.2.3.4 + +AUTHOR + Benjamin Peterson + +CREDITS + Nobody + +FILE + %s +""".strip() + +expected_text_data_docstrings = tuple('\n | ' + s if s else '' + for s in expected_data_docstrings) + +html2text_of_expected = """ +test.test_pydoc.pydoc_mod (version 1.2.3.4) +This is a test module for test_pydoc + +Modules + types + typing + +Classes + builtins.object + A + B + C + +class A(builtins.object) + Hello and goodbye + + Methods defined here: + __init__() + Wow, I have no function! 
+ ---------------------------------------------------------------------- + Data descriptors defined here: + __dict__ + dictionary for instance variables + __weakref__ + list of weak references to the object + +class B(builtins.object) + Data descriptors defined here: + __dict__ + dictionary for instance variables + __weakref__ + list of weak references to the object + ---------------------------------------------------------------------- + Data and other attributes defined here: + NO_MEANING = 'eggs' + __annotations__ = {'NO_MEANING': } + + +class C(builtins.object) + Methods defined here: + get_answer(self) + Return say_no() + is_it_true(self) + Return self.get_answer() + say_no(self) + ---------------------------------------------------------------------- + Class methods defined here: + __class_getitem__(item) + ---------------------------------------------------------------------- + Data descriptors defined here: + __dict__ + dictionary for instance variables + __weakref__ + list of weak references to the object + +Functions + doc_func() + This function solves all of the world's problems: + hunger + lack of Python + war + nodoc_func() + +Data + __xyz__ = 'X, Y and Z' + c_alias = test.test_pydoc.pydoc_mod.C[int] + list_alias1 = typing.List[int] + list_alias2 = list[int] + type_union1 = typing.Union[int, str] + type_union2 = int | str + +Author + Benjamin Peterson + +Credits + Nobody +""" + +expected_html_data_docstrings = tuple(s.replace(' ', ' ') + for s in expected_data_docstrings) + +# output pattern for missing module +missing_pattern = '''\ +No Python documentation found for %r. +Use help() to get the interactive help utility. +Use help(str) for help on the str class.'''.replace('\n', os.linesep) + +# output pattern for module with bad imports +badimport_pattern = "problem in %s - ModuleNotFoundError: No module named %r" + +expected_dynamicattribute_pattern = """ +Help on class DA in module %s: + +class DA(builtins.object) + | Data descriptors defined here: + |\x20\x20 + | __dict__%s + |\x20\x20 + | __weakref__%s + |\x20\x20 + | ham + |\x20\x20 + | ---------------------------------------------------------------------- + | Data and other attributes inherited from Meta: + |\x20\x20 + | ham = 'spam' +""".strip() + +expected_virtualattribute_pattern1 = """ +Help on class Class in module %s: + +class Class(builtins.object) + | Data and other attributes inherited from Meta: + |\x20\x20 + | LIFE = 42 +""".strip() + +expected_virtualattribute_pattern2 = """ +Help on class Class1 in module %s: + +class Class1(builtins.object) + | Data and other attributes inherited from Meta1: + |\x20\x20 + | one = 1 +""".strip() + +expected_virtualattribute_pattern3 = """ +Help on class Class2 in module %s: + +class Class2(Class1) + | Method resolution order: + | Class2 + | Class1 + | builtins.object + |\x20\x20 + | Data and other attributes inherited from Meta1: + |\x20\x20 + | one = 1 + |\x20\x20 + | ---------------------------------------------------------------------- + | Data and other attributes inherited from Meta3: + |\x20\x20 + | three = 3 + |\x20\x20 + | ---------------------------------------------------------------------- + | Data and other attributes inherited from Meta2: + |\x20\x20 + | two = 2 +""".strip() + +expected_missingattribute_pattern = """ +Help on class C in module %s: + +class C(builtins.object) + | Data and other attributes defined here: + |\x20\x20 + | here = 'present!' +""".strip() + +def run_pydoc(module_name, *args, **env): + """ + Runs pydoc on the specified module. 
Returns the stripped + output of pydoc. + """ + args = args + (module_name,) + # do not write bytecode files to avoid caching errors + rc, out, err = assert_python_ok('-B', pydoc.__file__, *args, **env) + return out.strip() + +def run_pydoc_fail(module_name, *args, **env): + """ + Runs pydoc on the specified module expecting a failure. + """ + args = args + (module_name,) + rc, out, err = assert_python_failure('-B', pydoc.__file__, *args, **env) + return out.strip() + +def get_pydoc_html(module): + "Returns pydoc generated output as html" + doc = pydoc.HTMLDoc() + output = doc.docmodule(module) + loc = doc.getdocloc(pydoc_mod) or "" + if loc: + loc = "
<br><a href=\"" + loc + "\">Module Docs</a>" + return output.strip(), loc +
+def clean_text(doc): + # clean up the extra text formatting that pydoc performs + return re.sub('\b.', '', doc) +
+def get_pydoc_link(module): + "Returns a documentation web link of a module" + abspath = os.path.abspath + dirname = os.path.dirname + basedir = dirname(dirname(dirname(abspath(__file__)))) + doc = pydoc.TextDoc() + loc = doc.getdocloc(module, basedir=basedir) + return loc +
+def get_pydoc_text(module): + "Returns pydoc generated output as text" + doc = pydoc.TextDoc() + loc = doc.getdocloc(pydoc_mod) or "" + if loc: + loc = "\nMODULE DOCS\n " + loc + "\n" + + output = doc.docmodule(module) + output = clean_text(output) + return output.strip(), loc +
+def get_html_title(text): + # Bit of hack, but good enough for test purposes + header, _, _ = text.partition("</head>") + _, _, title = header.partition("<title>") + title, _, _ = title.partition("</title>") + return title + +
+def html2text(html): + """A quick and dirty implementation of html2text. + + Tailored for pydoc tests only. + """ + html = html.replace("<dd>", "\n") + html = html.replace("<hr>
", "-"*70) + html = re.sub("<.*?>", "", html) + html = pydoc.replace(html, " ", " ", ">", ">", "<", "<") + return html + + +class PydocBaseTest(unittest.TestCase): + + def _restricted_walk_packages(self, walk_packages, path=None): + """ + A version of pkgutil.walk_packages() that will restrict itself to + a given path. + """ + default_path = path or [os.path.dirname(__file__)] + def wrapper(path=None, prefix='', onerror=None): + return walk_packages(path or default_path, prefix, onerror) + return wrapper + + @contextlib.contextmanager + def restrict_walk_packages(self, path=None): + walk_packages = pkgutil.walk_packages + pkgutil.walk_packages = self._restricted_walk_packages(walk_packages, + path) + try: + yield + finally: + pkgutil.walk_packages = walk_packages + + def call_url_handler(self, url, expected_title): + text = pydoc._url_handler(url, "text/html") + result = get_html_title(text) + # Check the title to ensure an unexpected error page was not returned + self.assertEqual(result, expected_title, text) + return text + + +class PydocDocTest(unittest.TestCase): + maxDiff = None + + @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), + 'trace function introduces __locals__ unexpectedly') + @requires_docstrings + def test_html_doc(self): + result, doc_loc = get_pydoc_html(pydoc_mod) + text_result = html2text(result) + text_lines = [line.strip() for line in text_result.splitlines()] + text_lines = [line for line in text_lines if line] + del text_lines[1] + expected_lines = html2text_of_expected.splitlines() + expected_lines = [line.strip() for line in expected_lines if line] + self.assertEqual(text_lines, expected_lines) + mod_file = inspect.getabsfile(pydoc_mod) + mod_url = urllib.parse.quote(mod_file) + self.assertIn(mod_url, result) + self.assertIn(mod_file, result) + self.assertIn(doc_loc, result) + + @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), + 'trace function introduces __locals__ unexpectedly') + @requires_docstrings + def test_text_doc(self): + result, doc_loc = get_pydoc_text(pydoc_mod) + expected_text = expected_text_pattern % ( + (doc_loc,) + + expected_text_data_docstrings + + (inspect.getabsfile(pydoc_mod),)) + self.assertEqual(expected_text, result) + + def test_text_enum_member_with_value_zero(self): + # Test issue #20654 to ensure enum member with value 0 can be + # displayed. It used to throw KeyError: 'zero'. + import enum + class BinaryInteger(enum.IntEnum): + zero = 0 + one = 1 + doc = pydoc.render_doc(BinaryInteger) + self.assertIn('BinaryInteger.zero', doc) + + def test_mixed_case_module_names_are_lower_cased(self): + # issue16484 + doc_link = get_pydoc_link(xml.etree.ElementTree) + self.assertIn('xml.etree.elementtree', doc_link) + + def test_issue8225(self): + # Test issue8225 to ensure no doc link appears for xml.etree + result, doc_loc = get_pydoc_text(xml.etree) + self.assertEqual(doc_loc, "", "MODULE DOCS incorrectly includes a link") + + def test_getpager_with_stdin_none(self): + previous_stdin = sys.stdin + try: + sys.stdin = None + pydoc.getpager() # Shouldn't fail. 
+ finally: + sys.stdin = previous_stdin + + def test_non_str_name(self): + # issue14638 + # Treat illegal (non-str) name like no name + + class A: + __name__ = 42 + class B: + pass + adoc = pydoc.render_doc(A()) + bdoc = pydoc.render_doc(B()) + self.assertEqual(adoc.replace("A", "B"), bdoc) + + def test_not_here(self): + missing_module = "test.i_am_not_here" + result = str(run_pydoc_fail(missing_module), 'ascii') + expected = missing_pattern % missing_module + self.assertEqual(expected, result, + "documentation for missing module found") + + @requires_docstrings + def test_not_ascii(self): + result = run_pydoc('test.test_pydoc.test_pydoc.nonascii', PYTHONIOENCODING='ascii') + encoded = nonascii.__doc__.encode('ascii', 'backslashreplace') + self.assertIn(encoded, result) + + def test_input_strip(self): + missing_module = " test.i_am_not_here " + result = str(run_pydoc_fail(missing_module), 'ascii') + expected = missing_pattern % missing_module.strip() + self.assertEqual(expected, result) + + def test_stripid(self): + # test with strings, other implementations might have different repr() + stripid = pydoc.stripid + # strip the id + self.assertEqual(stripid(''), + '') + self.assertEqual(stripid(''), + '') + # nothing to strip, return the same text + self.assertEqual(stripid('42'), '42') + self.assertEqual(stripid(""), + "") + + def test_builtin_with_more_than_four_children(self): + """Tests help on builtin object which have more than four child classes. + + When running help() on a builtin class which has child classes, it + should contain a "Built-in subclasses" section and only 4 classes + should be displayed with a hint on how many more subclasses are present. + For example: + + >>> help(object) + Help on class object in module builtins: + + class object + | The most base type + | + | Built-in subclasses: + | async_generator + | BaseException + | builtin_function_or_method + | bytearray + | ... and 82 other subclasses + """ + doc = pydoc.TextDoc() + text = doc.docclass(object) + snip = (" | Built-in subclasses:\n" + " | async_generator\n" + " | BaseException\n" + " | builtin_function_or_method\n" + " | bytearray\n" + " | ... and \\d+ other subclasses") + self.assertRegex(text, snip) + + def test_builtin_with_child(self): + """Tests help on builtin object which have only child classes. + + When running help() on a builtin class which has child classes, it + should contain a "Built-in subclasses" section. For example: + + >>> help(ArithmeticError) + Help on class ArithmeticError in module builtins: + + class ArithmeticError(Exception) + | Base class for arithmetic errors. + | + ... + | + | Built-in subclasses: + | FloatingPointError + | OverflowError + | ZeroDivisionError + """ + doc = pydoc.TextDoc() + text = doc.docclass(ArithmeticError) + snip = (" | Built-in subclasses:\n" + " | FloatingPointError\n" + " | OverflowError\n" + " | ZeroDivisionError") + self.assertIn(snip, text) + + def test_builtin_with_grandchild(self): + """Tests help on builtin classes which have grandchild classes. + + When running help() on a builtin class which has child classes, it + should contain a "Built-in subclasses" section. However, if it also has + grandchildren, these should not show up on the subclasses section. + For example: + + >>> help(Exception) + Help on class Exception in module builtins: + + class Exception(BaseException) + | Common base class for all non-exit exceptions. + | + ... + | + | Built-in subclasses: + | ArithmeticError + | AssertionError + | AttributeError + ... 
+ """ + doc = pydoc.TextDoc() + text = doc.docclass(Exception) + snip = (" | Built-in subclasses:\n" + " | ArithmeticError\n" + " | AssertionError\n" + " | AttributeError") + self.assertIn(snip, text) + # Testing that the grandchild ZeroDivisionError does not show up + self.assertNotIn('ZeroDivisionError', text) + + def test_builtin_no_child(self): + """Tests help on builtin object which have no child classes. + + When running help() on a builtin class which has no child classes, it + should not contain any "Built-in subclasses" section. For example: + + >>> help(ZeroDivisionError) + + Help on class ZeroDivisionError in module builtins: + + class ZeroDivisionError(ArithmeticError) + | Second argument to a division or modulo operation was zero. + | + | Method resolution order: + | ZeroDivisionError + | ArithmeticError + | Exception + | BaseException + | object + | + | Methods defined here: + ... + """ + doc = pydoc.TextDoc() + text = doc.docclass(ZeroDivisionError) + # Testing that the subclasses section does not appear + self.assertNotIn('Built-in subclasses', text) + + def test_builtin_on_metaclasses(self): + """Tests help on metaclasses. + + When running help() on a metaclasses such as type, it + should not contain any "Built-in subclasses" section. + """ + doc = pydoc.TextDoc() + text = doc.docclass(type) + # Testing that the subclasses section does not appear + self.assertNotIn('Built-in subclasses', text) + + def test_fail_help_cli(self): + elines = (missing_pattern % 'abd').splitlines() + with spawn_python("-c" "help()") as proc: + out, _ = proc.communicate(b"abd") + olines = out.decode().splitlines()[-9:-6] + olines[0] = olines[0].removeprefix('help> ') + self.assertEqual(elines, olines) + + def test_fail_help_output_redirect(self): + with StringIO() as buf: + helper = pydoc.Helper(output=buf) + helper.help("abd") + expected = missing_pattern % "abd" + self.assertEqual(expected, buf.getvalue().strip().replace('\n', os.linesep)) + + @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), + 'trace function introduces __locals__ unexpectedly') + @requires_docstrings + def test_help_output_redirect(self): + # issue 940286, if output is set in Helper, then all output from + # Helper.help should be redirected + getpager_old = pydoc.getpager + getpager_new = lambda: (lambda x: x) + self.maxDiff = None + + buf = StringIO() + helper = pydoc.Helper(output=buf) + unused, doc_loc = get_pydoc_text(pydoc_mod) + module = "test.test_pydoc.pydoc_mod" + help_header = """ + Help on module test.test_pydoc.pydoc_mod in test.test_pydoc: + + """.lstrip() + help_header = textwrap.dedent(help_header) + expected_help_pattern = help_header + expected_text_pattern + + pydoc.getpager = getpager_new + try: + with captured_output('stdout') as output, \ + captured_output('stderr') as err: + helper.help(module) + result = buf.getvalue().strip() + expected_text = expected_help_pattern % ( + (doc_loc,) + + expected_text_data_docstrings + + (inspect.getabsfile(pydoc_mod),)) + self.assertEqual('', output.getvalue()) + self.assertEqual('', err.getvalue()) + self.assertEqual(expected_text, result) + finally: + pydoc.getpager = getpager_old + + def test_lambda_with_return_annotation(self): + func = lambda a, b, c: 1 + func.__annotations__ = {"return": int} + with captured_output('stdout') as help_io: + pydoc.help(func) + helptext = help_io.getvalue() + self.assertIn("lambda (a, b, c) -> int", helptext) + + def test_lambda_without_return_annotation(self): + func = lambda a, b, c: 1 + func.__annotations__ = {"a": int, 
"b": int, "c": int} + with captured_output('stdout') as help_io: + pydoc.help(func) + helptext = help_io.getvalue() + self.assertIn("lambda (a: int, b: int, c: int)", helptext) + + def test_lambda_with_return_and_params_annotation(self): + func = lambda a, b, c: 1 + func.__annotations__ = {"a": int, "b": int, "c": int, "return": int} + with captured_output('stdout') as help_io: + pydoc.help(func) + helptext = help_io.getvalue() + self.assertIn("lambda (a: int, b: int, c: int) -> int", helptext) + + def test_namedtuple_fields(self): + Person = namedtuple('Person', ['nickname', 'firstname']) + with captured_stdout() as help_io: + pydoc.help(Person) + helptext = help_io.getvalue() + self.assertIn("nickname", helptext) + self.assertIn("firstname", helptext) + self.assertIn("Alias for field number 0", helptext) + self.assertIn("Alias for field number 1", helptext) + + def test_namedtuple_public_underscore(self): + NT = namedtuple('NT', ['abc', 'def'], rename=True) + with captured_stdout() as help_io: + pydoc.help(NT) + helptext = help_io.getvalue() + self.assertIn('_1', helptext) + self.assertIn('_replace', helptext) + self.assertIn('_asdict', helptext) + + def test_synopsis(self): + self.addCleanup(unlink, TESTFN) + for encoding in ('ISO-8859-1', 'UTF-8'): + with open(TESTFN, 'w', encoding=encoding) as script: + if encoding != 'UTF-8': + print('#coding: {}'.format(encoding), file=script) + print('"""line 1: h\xe9', file=script) + print('line 2: hi"""', file=script) + synopsis = pydoc.synopsis(TESTFN, {}) + self.assertEqual(synopsis, 'line 1: h\xe9') + + @requires_docstrings + def test_synopsis_sourceless(self): + os = import_helper.import_fresh_module('os') + expected = os.__doc__.splitlines()[0] + filename = os.__cached__ + synopsis = pydoc.synopsis(filename) + + self.assertEqual(synopsis, expected) + + def test_synopsis_sourceless_empty_doc(self): + with os_helper.temp_cwd() as test_dir: + init_path = os.path.join(test_dir, 'foomod42.py') + cached_path = importlib.util.cache_from_source(init_path) + with open(init_path, 'w') as fobj: + fobj.write("foo = 1") + py_compile.compile(init_path) + synopsis = pydoc.synopsis(init_path, {}) + self.assertIsNone(synopsis) + synopsis_cached = pydoc.synopsis(cached_path, {}) + self.assertIsNone(synopsis_cached) + + def test_splitdoc_with_description(self): + example_string = "I Am A Doc\n\n\nHere is my description" + self.assertEqual(pydoc.splitdoc(example_string), + ('I Am A Doc', '\nHere is my description')) + + def test_is_package_when_not_package(self): + with os_helper.temp_cwd() as test_dir: + self.assertFalse(pydoc.ispackage(test_dir)) + + def test_is_package_when_is_package(self): + with os_helper.temp_cwd() as test_dir: + init_path = os.path.join(test_dir, '__init__.py') + open(init_path, 'w').close() + self.assertTrue(pydoc.ispackage(test_dir)) + os.remove(init_path) + + def test_allmethods(self): + # issue 17476: allmethods was no longer returning unbound methods. + # This test is a bit fragile in the face of changes to object and type, + # but I can't think of a better way to do it without duplicating the + # logic of the function under test. + + class TestClass(object): + def method_returning_true(self): + return True + + # What we expect to get back: everything on object... + expected = dict(vars(object)) + # ...plus our unbound method... + expected['method_returning_true'] = TestClass.method_returning_true + # ...but not the non-methods on object. 
+ del expected['__doc__'] + del expected['__class__'] + # inspect resolves descriptors on type into methods, but vars doesn't, + # so we need to update __subclasshook__ and __init_subclass__. + expected['__subclasshook__'] = TestClass.__subclasshook__ + expected['__init_subclass__'] = TestClass.__init_subclass__ + + methods = pydoc.allmethods(TestClass) + self.assertDictEqual(methods, expected) + + @requires_docstrings + def test_method_aliases(self): + class A: + def tkraise(self, aboveThis=None): + """Raise this widget in the stacking order.""" + lift = tkraise + def a_size(self): + """Return size""" + class B(A): + def itemconfigure(self, tagOrId, cnf=None, **kw): + """Configure resources of an item TAGORID.""" + itemconfig = itemconfigure + b_size = A.a_size + + doc = pydoc.render_doc(B) + doc = clean_text(doc) + self.assertEqual(doc, '''\ +Python Library Documentation: class B in module %s + +class B(A) + | Method resolution order: + | B + | A + | builtins.object + |\x20\x20 + | Methods defined here: + |\x20\x20 + | b_size = a_size(self) + |\x20\x20 + | itemconfig = itemconfigure(self, tagOrId, cnf=None, **kw) + |\x20\x20 + | itemconfigure(self, tagOrId, cnf=None, **kw) + | Configure resources of an item TAGORID. + |\x20\x20 + | ---------------------------------------------------------------------- + | Methods inherited from A: + |\x20\x20 + | a_size(self) + | Return size + |\x20\x20 + | lift = tkraise(self, aboveThis=None) + |\x20\x20 + | tkraise(self, aboveThis=None) + | Raise this widget in the stacking order. + |\x20\x20 + | ---------------------------------------------------------------------- + | Data descriptors inherited from A: + |\x20\x20 + | __dict__ + | dictionary for instance variables + |\x20\x20 + | __weakref__ + | list of weak references to the object +''' % __name__) + + doc = pydoc.render_doc(B, renderer=pydoc.HTMLDoc()) + expected_text = f""" +Python Library Documentation + +class B in module {__name__} +class B(A) + Method resolution order: + B + A + builtins.object + + Methods defined here: + b_size = a_size(self) + itemconfig = itemconfigure(self, tagOrId, cnf=None, **kw) + itemconfigure(self, tagOrId, cnf=None, **kw) + Configure resources of an item TAGORID. + + Methods inherited from A: + a_size(self) + Return size + lift = tkraise(self, aboveThis=None) + tkraise(self, aboveThis=None) + Raise this widget in the stacking order. 
+ + Data descriptors inherited from A: + __dict__ + dictionary for instance variables + __weakref__ + list of weak references to the object +""" + as_text = html2text(doc) + expected_lines = [line.strip() for line in expected_text.split("\n") if line] + for expected_line in expected_lines: + self.assertIn(expected_line, as_text) + + def test__future__imports(self): + # __future__ features are excluded from module help, + # except when it's the __future__ module itself + import __future__ + future_text, _ = get_pydoc_text(__future__) + future_html, _ = get_pydoc_html(__future__) + pydoc_mod_text, _ = get_pydoc_text(pydoc_mod) + pydoc_mod_html, _ = get_pydoc_html(pydoc_mod) + + for feature in __future__.all_feature_names: + txt = f"{feature} = _Feature" + html = f"{feature} = _Feature" + self.assertIn(txt, future_text) + self.assertIn(html, future_html) + self.assertNotIn(txt, pydoc_mod_text) + self.assertNotIn(html, pydoc_mod_html) + + +class PydocImportTest(PydocBaseTest): + + def setUp(self): + self.test_dir = os.mkdir(TESTFN) + self.addCleanup(rmtree, TESTFN) + importlib.invalidate_caches() + + def test_badimport(self): + # This tests the fix for issue 5230, where if pydoc found the module + # but the module had an internal import error pydoc would report no doc + # found. + modname = 'testmod_xyzzy' + testpairs = ( + ('i_am_not_here', 'i_am_not_here'), + ('test.i_am_not_here_either', 'test.i_am_not_here_either'), + ('test.i_am_not_here.neither_am_i', 'test.i_am_not_here'), + ('i_am_not_here.{}'.format(modname), 'i_am_not_here'), + ('test.{}'.format(modname), 'test.{}'.format(modname)), + ) + + sourcefn = os.path.join(TESTFN, modname) + os.extsep + "py" + for importstring, expectedinmsg in testpairs: + with open(sourcefn, 'w') as f: + f.write("import {}\n".format(importstring)) + result = run_pydoc_fail(modname, PYTHONPATH=TESTFN).decode("ascii") + expected = badimport_pattern % (modname, expectedinmsg) + self.assertEqual(expected, result) + + def test_apropos_with_bad_package(self): + # Issue 7425 - pydoc -k failed when bad package on path + pkgdir = os.path.join(TESTFN, "syntaxerr") + os.mkdir(pkgdir) + badsyntax = os.path.join(pkgdir, "__init__") + os.extsep + "py" + with open(badsyntax, 'w') as f: + f.write("invalid python syntax = $1\n") + with self.restrict_walk_packages(path=[TESTFN]): + with captured_stdout() as out: + with captured_stderr() as err: + pydoc.apropos('xyzzy') + # No result, no error + self.assertEqual(out.getvalue(), '') + self.assertEqual(err.getvalue(), '') + # The package name is still matched + with captured_stdout() as out: + with captured_stderr() as err: + pydoc.apropos('syntaxerr') + self.assertEqual(out.getvalue().strip(), 'syntaxerr') + self.assertEqual(err.getvalue(), '') + + def test_apropos_with_unreadable_dir(self): + # Issue 7367 - pydoc -k failed when unreadable dir on path + self.unreadable_dir = os.path.join(TESTFN, "unreadable") + os.mkdir(self.unreadable_dir, 0) + self.addCleanup(os.rmdir, self.unreadable_dir) + # Note, on Windows the directory appears to be still + # readable so this is not really testing the issue there + with self.restrict_walk_packages(path=[TESTFN]): + with captured_stdout() as out: + with captured_stderr() as err: + pydoc.apropos('SOMEKEY') + # No result, no error + self.assertEqual(out.getvalue(), '') + self.assertEqual(err.getvalue(), '') + + @os_helper.skip_unless_working_chmod + @unittest.skipIf(is_emscripten, "cannot remove x bit") + def test_apropos_empty_doc(self): + pkgdir = os.path.join(TESTFN, 'walkpkg') + 
os.mkdir(pkgdir) + self.addCleanup(rmtree, pkgdir) + init_path = os.path.join(pkgdir, '__init__.py') + with open(init_path, 'w') as fobj: + fobj.write("foo = 1") + current_mode = stat.S_IMODE(os.stat(pkgdir).st_mode) + try: + os.chmod(pkgdir, current_mode & ~stat.S_IEXEC) + with self.restrict_walk_packages(path=[TESTFN]), captured_stdout() as stdout: + pydoc.apropos('') + self.assertIn('walkpkg', stdout.getvalue()) + finally: + os.chmod(pkgdir, current_mode) + +
+ def test_url_search_package_error(self): + # URL handler search should cope with packages that raise exceptions + pkgdir = os.path.join(TESTFN, "test_error_package") + os.mkdir(pkgdir) + init = os.path.join(pkgdir, "__init__.py") + with open(init, "wt", encoding="ascii") as f: + f.write("""raise ValueError("ouch")\n""") + with self.restrict_walk_packages(path=[TESTFN]): + # Package has to be importable for the error to have any effect + saved_paths = tuple(sys.path) + sys.path.insert(0, TESTFN) + try: + with self.assertRaisesRegex(ValueError, "ouch"): + import test_error_package # Sanity check + + text = self.call_url_handler("search?key=test_error_package", + "Pydoc: Search Results") + found = ('<a href="test_error_package.html">' + 'test_error_package</a>') + self.assertIn(found, text) + finally: + sys.path[:] = saved_paths + +
+ @unittest.skip('causes undesirable side-effects (#20128)') + def test_modules(self): + # See Helper.listmodules(). + num_header_lines = 2 + num_module_lines_min = 5 # Playing it safe. + num_footer_lines = 3 + expected = num_header_lines + num_module_lines_min + num_footer_lines + + output = StringIO() + helper = pydoc.Helper(output=output) + helper('modules') + result = output.getvalue().strip() + num_lines = len(result.splitlines()) + + self.assertGreaterEqual(num_lines, expected) + + @unittest.skip('causes undesirable side-effects (#20128)') + def test_modules_search(self): + # See Helper.listmodules().
+ expected = 'pydoc - ' + + output = StringIO() + helper = pydoc.Helper(output=output) + with captured_stdout() as help_io: + helper('modules pydoc') + result = help_io.getvalue() + + self.assertIn(expected, result) + + @unittest.skip('some buildbots are not cooperating (#20128)') + def test_modules_search_builtin(self): + expected = 'gc - ' + + output = StringIO() + helper = pydoc.Helper(output=output) + with captured_stdout() as help_io: + helper('modules garbage') + result = help_io.getvalue() + + self.assertTrue(result.startswith(expected)) + + def test_importfile(self): + loaded_pydoc = pydoc.importfile(pydoc.__file__) + + self.assertIsNot(loaded_pydoc, pydoc) + self.assertEqual(loaded_pydoc.__name__, 'pydoc') + self.assertEqual(loaded_pydoc.__file__, pydoc.__file__) + self.assertEqual(loaded_pydoc.__spec__, pydoc.__spec__) + + +class TestDescriptions(unittest.TestCase): + + def test_module(self): + # Check that pydocfodder module can be described + doc = pydoc.render_doc(pydocfodder) + self.assertIn("pydocfodder", doc) + + def test_class(self): + class C: "New-style class" + c = C() + + self.assertEqual(pydoc.describe(C), 'class C') + self.assertEqual(pydoc.describe(c), 'C') + expected = 'C in module %s object' % __name__ + self.assertIn(expected, pydoc.render_doc(c)) + + def test_generic_alias(self): + self.assertEqual(pydoc.describe(typing.List[int]), '_GenericAlias') + doc = pydoc.render_doc(typing.List[int], renderer=pydoc.plaintext) + self.assertIn('_GenericAlias in module typing', doc) + self.assertIn('List = class list(object)', doc) + if not MISSING_C_DOCSTRINGS: + self.assertIn(list.__doc__.strip().splitlines()[0], doc) + + self.assertEqual(pydoc.describe(list[int]), 'GenericAlias') + doc = pydoc.render_doc(list[int], renderer=pydoc.plaintext) + self.assertIn('GenericAlias in module builtins', doc) + self.assertIn('\nclass list(object)', doc) + if not MISSING_C_DOCSTRINGS: + self.assertIn(list.__doc__.strip().splitlines()[0], doc) + + def test_union_type(self): + self.assertEqual(pydoc.describe(typing.Union[int, str]), '_UnionGenericAlias') + doc = pydoc.render_doc(typing.Union[int, str], renderer=pydoc.plaintext) + self.assertIn('_UnionGenericAlias in module typing', doc) + self.assertIn('Union = typing.Union', doc) + if typing.Union.__doc__: + self.assertIn(typing.Union.__doc__.strip().splitlines()[0], doc) + + self.assertEqual(pydoc.describe(int | str), 'UnionType') + doc = pydoc.render_doc(int | str, renderer=pydoc.plaintext) + self.assertIn('UnionType in module types object', doc) + self.assertIn('\nclass UnionType(builtins.object)', doc) + if not MISSING_C_DOCSTRINGS: + self.assertIn(types.UnionType.__doc__.strip().splitlines()[0], doc) + + def test_special_form(self): + self.assertEqual(pydoc.describe(typing.NoReturn), '_SpecialForm') + doc = pydoc.render_doc(typing.NoReturn, renderer=pydoc.plaintext) + self.assertIn('_SpecialForm in module typing', doc) + if typing.NoReturn.__doc__: + self.assertIn('NoReturn = typing.NoReturn', doc) + self.assertIn(typing.NoReturn.__doc__.strip().splitlines()[0], doc) + else: + self.assertIn('NoReturn = class _SpecialForm(_Final)', doc) + + def test_typing_pydoc(self): + def foo(data: typing.List[typing.Any], + x: int) -> typing.Iterator[typing.Tuple[int, typing.Any]]: + ... + T = typing.TypeVar('T') + class C(typing.Generic[T], typing.Mapping[int, str]): ... 
+ self.assertEqual(pydoc.render_doc(foo).splitlines()[-1], + 'f\x08fo\x08oo\x08o(data: List[Any], x: int)' + ' -> Iterator[Tuple[int, Any]]') + self.assertEqual(pydoc.render_doc(C).splitlines()[2], + 'class C\x08C(collections.abc.Mapping, typing.Generic)') + + def test_builtin(self): + for name in ('str', 'str.translate', 'builtins.str', + 'builtins.str.translate'): + # test low-level function + self.assertIsNotNone(pydoc.locate(name)) + # test high-level function + try: + pydoc.render_doc(name) + except ImportError: + self.fail('finding the doc of {!r} failed'.format(name)) + + for name in ('notbuiltins', 'strrr', 'strr.translate', + 'str.trrrranslate', 'builtins.strrr', + 'builtins.str.trrranslate'): + self.assertIsNone(pydoc.locate(name)) + self.assertRaises(ImportError, pydoc.render_doc, name) + + @staticmethod + def _get_summary_line(o): + text = pydoc.plain(pydoc.render_doc(o)) + lines = text.split('\n') + assert len(lines) >= 2 + return lines[2] + + @staticmethod + def _get_summary_lines(o): + text = pydoc.plain(pydoc.render_doc(o)) + lines = text.split('\n') + return '\n'.join(lines[2:]) + + # these should include "self" + def test_unbound_python_method(self): + self.assertEqual(self._get_summary_line(textwrap.TextWrapper.wrap), + "wrap(self, text)") + + @requires_docstrings + def test_unbound_builtin_method(self): + self.assertEqual(self._get_summary_line(_pickle.Pickler.dump), + "dump(self, obj, /) unbound _pickle.Pickler method") + + # these no longer include "self" + def test_bound_python_method(self): + t = textwrap.TextWrapper() + self.assertEqual(self._get_summary_line(t.wrap), + "wrap(text) method of textwrap.TextWrapper instance") + def test_field_order_for_named_tuples(self): + Person = namedtuple('Person', ['nickname', 'firstname', 'agegroup']) + s = pydoc.render_doc(Person) + self.assertLess(s.index('nickname'), s.index('firstname')) + self.assertLess(s.index('firstname'), s.index('agegroup')) + + class NonIterableFields: + _fields = None + + class NonHashableFields: + _fields = [[]] + + # Make sure these doesn't fail + pydoc.render_doc(NonIterableFields) + pydoc.render_doc(NonHashableFields) + + @requires_docstrings + def test_bound_builtin_method(self): + s = StringIO() + p = _pickle.Pickler(s) + self.assertEqual(self._get_summary_line(p.dump), + "dump(obj, /) method of _pickle.Pickler instance") + + # this should *never* include self! + @requires_docstrings + def test_module_level_callable(self): + self.assertEqual(self._get_summary_line(os.stat), + "stat(path, *, dir_fd=None, follow_symlinks=True)") + + def test_unbound_builtin_method_noargs(self): + self.assertEqual(self._get_summary_line(str.lower), + "lower(self, /) unbound builtins.str method") + + def test_bound_builtin_method_noargs(self): + self.assertEqual(self._get_summary_line(''.lower), + "lower() method of builtins.str instance") + + @requires_docstrings + def test_staticmethod(self): + class X: + @staticmethod + def sm(x, y): + '''A static method''' + ... + self.assertEqual(self._get_summary_lines(X.__dict__['sm']), + 'sm(x, y)\n' + ' A static method\n') + self.assertEqual(self._get_summary_lines(X.sm), """\ +sm(x, y) + A static method +""") + self.assertIn(""" + | Static methods defined here: + |\x20\x20 + | sm(x, y) + | A static method +""", pydoc.plain(pydoc.render_doc(X))) + + @requires_docstrings + def test_classmethod(self): + class X: + @classmethod + def cm(cls, x): + '''A class method''' + ... 
+ self.assertEqual(self._get_summary_lines(X.__dict__['cm']), + 'cm(...)\n' + ' A class method\n') + self.assertEqual(self._get_summary_lines(X.cm), """\ +cm(x) class method of test.test_pydoc.test_pydoc.X + A class method +""") + self.assertIn(""" + | Class methods defined here: + |\x20\x20 + | cm(x) + | A class method +""", pydoc.plain(pydoc.render_doc(X))) + + @requires_docstrings + def test_getset_descriptor(self): + # Currently these attributes are implemented as getset descriptors + # in CPython. + self.assertEqual(self._get_summary_line(int.numerator), "numerator") + self.assertEqual(self._get_summary_line(float.real), "real") + self.assertEqual(self._get_summary_line(Exception.args), "args") + self.assertEqual(self._get_summary_line(memoryview.obj), "obj") + + @requires_docstrings + def test_member_descriptor(self): + # Currently these attributes are implemented as member descriptors + # in CPython. + self.assertEqual(self._get_summary_line(complex.real), "real") + self.assertEqual(self._get_summary_line(range.start), "start") + self.assertEqual(self._get_summary_line(slice.start), "start") + self.assertEqual(self._get_summary_line(property.fget), "fget") + self.assertEqual(self._get_summary_line(StopIteration.value), "value") + + @requires_docstrings + def test_slot_descriptor(self): + class Point: + __slots__ = 'x', 'y' + self.assertEqual(self._get_summary_line(Point.x), "x") + + @requires_docstrings + def test_dict_attr_descriptor(self): + class NS: + pass + self.assertEqual(self._get_summary_line(NS.__dict__['__dict__']), + "__dict__") + + @requires_docstrings + def test_structseq_member_descriptor(self): + self.assertEqual(self._get_summary_line(type(sys.hash_info).width), + "width") + self.assertEqual(self._get_summary_line(type(sys.flags).debug), + "debug") + self.assertEqual(self._get_summary_line(type(sys.version_info).major), + "major") + self.assertEqual(self._get_summary_line(type(sys.float_info).max), + "max") + + @requires_docstrings + def test_namedtuple_field_descriptor(self): + Box = namedtuple('Box', ('width', 'height')) + self.assertEqual(self._get_summary_lines(Box.width), """\ + Alias for field number 0 +""") + + @requires_docstrings + def test_property(self): + class Rect: + @property + def area(self): + '''Area of the rect''' + return self.w * self.h + + self.assertEqual(self._get_summary_lines(Rect.area), """\ + Area of the rect +""") + self.assertIn(""" + | area + | Area of the rect +""", pydoc.plain(pydoc.render_doc(Rect))) + + @requires_docstrings + def test_custom_non_data_descriptor(self): + class Descr: + def __get__(self, obj, cls): + if obj is None: + return self + return 42 + class X: + attr = Descr() + + self.assertEqual(self._get_summary_lines(X.attr), f"""\ +<{__name__}.TestDescriptions.test_custom_non_data_descriptor.<locals>.Descr object>""") + + X.attr.__doc__ = 'Custom descriptor' + self.assertEqual(self._get_summary_lines(X.attr), f"""\ +<{__name__}.TestDescriptions.test_custom_non_data_descriptor.<locals>.Descr object> + Custom descriptor +""") + + X.attr.__name__ = 'foo' + self.assertEqual(self._get_summary_lines(X.attr), """\ +foo(...)
+ Custom descriptor +""") + + @requires_docstrings + def test_custom_data_descriptor(self): + class Descr: + def __get__(self, obj, cls): + if obj is None: + return self + return 42 + def __set__(self, obj, cls): + 1/0 + class X: + attr = Descr() + + self.assertEqual(self._get_summary_lines(X.attr), "") + + X.attr.__doc__ = 'Custom descriptor' + self.assertEqual(self._get_summary_lines(X.attr), """\ + Custom descriptor +""") + + X.attr.__name__ = 'foo' + self.assertEqual(self._get_summary_lines(X.attr), """\ +foo + Custom descriptor +""") + + def test_async_annotation(self): + async def coro_function(ign) -> int: + return 1 + + text = pydoc.plain(pydoc.plaintext.document(coro_function)) + self.assertIn('async coro_function', text) + + html = pydoc.HTMLDoc().document(coro_function) + self.assertIn( + 'async <a name="-coro_function"><strong>coro_function', + html) + + def test_async_generator_annotation(self): + async def an_async_generator(): + yield 1 + + text = pydoc.plain(pydoc.plaintext.document(an_async_generator)) + self.assertIn('async an_async_generator', text) + + html = pydoc.HTMLDoc().document(an_async_generator) + self.assertIn( + 'async <a name="-an_async_generator"><strong>an_async_generator', + html) + + @requires_docstrings + def test_html_for_https_links(self): + def a_fn_with_https_link(): + """a link https://localhost/""" + pass + + html = pydoc.HTMLDoc().document(a_fn_with_https_link) + self.assertIn( + '<a href="https://localhost/">https://localhost/</a>', + html + ) + + +class PydocFodderTest(unittest.TestCase): + + def getsection(self, text, beginline, endline): + lines = text.splitlines() + beginindex, endindex = 0, None + if beginline is not None: + beginindex = lines.index(beginline) + if endline is not None: + endindex = lines.index(endline, beginindex) + return lines[beginindex:endindex] + + def test_text_doc_routines_in_class(self, cls=pydocfodder.B): + doc = pydoc.TextDoc() + result = doc.docclass(cls) + result = clean_text(result) + where = 'defined here' if cls is pydocfodder.B else 'inherited from B' + lines = self.getsection(result, f' | Methods {where}:', ' | ' + '-'*70) + self.assertIn(' | A_method_alias = A_method(self)', lines) + self.assertIn(' | B_method_alias = B_method(self)', lines) + self.assertIn(' | A_staticmethod(x, y) from test.test_pydoc.pydocfodder.A', lines) + self.assertIn(' | A_staticmethod_alias = A_staticmethod(x, y)', lines) + self.assertIn(' | global_func(x, y) from test.test_pydoc.pydocfodder', lines) + self.assertIn(' | global_func_alias = global_func(x, y)', lines) + self.assertIn(' | global_func2_alias = global_func2(x, y) from test.test_pydoc.pydocfodder', lines) + self.assertIn(' | __repr__(self, /) from builtins.object', lines) + self.assertIn(' | object_repr = __repr__(self, /)', lines) + + lines = self.getsection(result, f' | Static methods {where}:', ' | ' + '-'*70) + self.assertIn(' | A_classmethod_ref = A_classmethod(x) class method of test.test_pydoc.pydocfodder.A', lines) + note = '' if cls is pydocfodder.B else ' class method of test.test_pydoc.pydocfodder.B' + self.assertIn(' | B_classmethod_ref = B_classmethod(x)' + note, lines) + self.assertIn(' | A_method_ref = A_method() method of test.test_pydoc.pydocfodder.A instance', lines) + self.assertIn(' | get(key, default=None, /) method of builtins.dict instance', lines) + self.assertIn(' | dict_get = get(key, default=None, /) method of builtins.dict instance', lines) + + lines = self.getsection(result, f' | Class methods {where}:', ' | ' + '-'*70) + self.assertIn(' | B_classmethod(x)', lines) + self.assertIn(' | B_classmethod_alias = B_classmethod(x)', lines) + + def
test_html_doc_routines_in_class(self, cls=pydocfodder.B): + doc = pydoc.HTMLDoc() + result = doc.docclass(cls) + result = html2text(result) + where = 'defined here' if cls is pydocfodder.B else 'inherited from B' + lines = self.getsection(result, f'Methods {where}:', '-'*70) + self.assertIn('A_method_alias = A_method(self)', lines) + self.assertIn('B_method_alias = B_method(self)', lines) + self.assertIn('A_staticmethod(x, y) from test.test_pydoc.pydocfodder.A', lines) + self.assertIn('A_staticmethod_alias = A_staticmethod(x, y)', lines) + self.assertIn('global_func(x, y) from test.test_pydoc.pydocfodder', lines) + self.assertIn('global_func_alias = global_func(x, y)', lines) + self.assertIn('global_func2_alias = global_func2(x, y) from test.test_pydoc.pydocfodder', lines) + self.assertIn('__repr__(self, /) from builtins.object', lines) + self.assertIn('object_repr = __repr__(self, /)', lines) + + lines = self.getsection(result, f'Static methods {where}:', '-'*70) + self.assertIn('A_classmethod_ref = A_classmethod(x) class method of test.test_pydoc.pydocfodder.A', lines) + note = '' if cls is pydocfodder.B else ' class method of test.test_pydoc.pydocfodder.B' + self.assertIn('B_classmethod_ref = B_classmethod(x)' + note, lines) + self.assertIn('A_method_ref = A_method() method of test.test_pydoc.pydocfodder.A instance', lines) + + lines = self.getsection(result, f'Class methods {where}:', '-'*70) + self.assertIn('B_classmethod(x)', lines) + self.assertIn('B_classmethod_alias = B_classmethod(x)', lines) + + def test_text_doc_inherited_routines_in_class(self): + self.test_text_doc_routines_in_class(pydocfodder.D) + + def test_html_doc_inherited_routines_in_class(self): + self.test_html_doc_routines_in_class(pydocfodder.D) + + def test_text_doc_routines_in_module(self): + doc = pydoc.TextDoc() + result = doc.docmodule(pydocfodder) + result = clean_text(result) + lines = self.getsection(result, 'FUNCTIONS', 'FILE') + # function alias + self.assertIn(' global_func_alias = global_func(x, y)', lines) + self.assertIn(' A_staticmethod(x, y)', lines) + self.assertIn(' A_staticmethod_alias = A_staticmethod(x, y)', lines) + # bound class methods + self.assertIn(' A_classmethod(x) class method of A', lines) + self.assertIn(' A_classmethod2 = A_classmethod(x) class method of A', lines) + self.assertIn(' A_classmethod3 = A_classmethod(x) class method of B', lines) + # bound methods + self.assertIn(' A_method() method of A instance', lines) + self.assertIn(' A_method2 = A_method() method of A instance', lines) + self.assertIn(' A_method3 = A_method() method of B instance', lines) + self.assertIn(' A_staticmethod_ref = A_staticmethod(x, y)', lines) + self.assertIn(' A_staticmethod_ref2 = A_staticmethod(y) method of B instance', lines) + self.assertIn(' get(key, default=None, /) method of builtins.dict instance', lines) + self.assertIn(' dict_get = get(key, default=None, /) method of builtins.dict instance', lines) + # unbound methods + self.assertIn(' B_method(self)', lines) + self.assertIn(' B_method2 = B_method(self)', lines) + + def test_html_doc_routines_in_module(self): + doc = pydoc.HTMLDoc() + result = doc.docmodule(pydocfodder) + result = html2text(result) + lines = self.getsection(result, ' Functions', None) + # function alias + self.assertIn(' global_func_alias = global_func(x, y)', lines) + self.assertIn(' A_staticmethod(x, y)', lines) + self.assertIn(' A_staticmethod_alias = A_staticmethod(x, y)', lines) + # bound class methods + self.assertIn('A_classmethod(x) class method of A', lines) + 
self.assertIn(' A_classmethod2 = A_classmethod(x) class method of A', lines) + self.assertIn(' A_classmethod3 = A_classmethod(x) class method of B', lines) + # bound methods + self.assertIn(' A_method() method of A instance', lines) + self.assertIn(' A_method2 = A_method() method of A instance', lines) + self.assertIn(' A_method3 = A_method() method of B instance', lines) + self.assertIn(' A_staticmethod_ref = A_staticmethod(x, y)', lines) + self.assertIn(' A_staticmethod_ref2 = A_staticmethod(y) method of B instance', lines) + self.assertIn(' get(key, default=None, /) method of builtins.dict instance', lines) + self.assertIn(' dict_get = get(key, default=None, /) method of builtins.dict instance', lines) + # unbound methods + self.assertIn(' B_method(self)', lines) + self.assertIn(' B_method2 = B_method(self)', lines) + + +@unittest.skipIf( + is_emscripten or is_wasi, + "Socket server not available on Emscripten/WASI." +) +class PydocServerTest(unittest.TestCase): + """Tests for pydoc._start_server""" + + def test_server(self): + # Minimal test that starts the server, checks that it works, then stops + # it and checks its cleanup. + def my_url_handler(url, content_type): + text = 'the URL sent was: (%s, %s)' % (url, content_type) + return text + + serverthread = pydoc._start_server( + my_url_handler, + hostname='localhost', + port=0, + ) + self.assertEqual(serverthread.error, None) + self.assertTrue(serverthread.serving) + self.addCleanup( + lambda: serverthread.stop() if serverthread.serving else None + ) + self.assertIn('localhost', serverthread.url) + + self.addCleanup(urlcleanup) + self.assertEqual( + b'the URL sent was: (/test, text/html)', + urlopen(urllib.parse.urljoin(serverthread.url, '/test')).read(), + ) + self.assertEqual( + b'the URL sent was: (/test.css, text/css)', + urlopen(urllib.parse.urljoin(serverthread.url, '/test.css')).read(), + ) + + serverthread.stop() + self.assertFalse(serverthread.serving) + self.assertIsNone(serverthread.docserver) + self.assertIsNone(serverthread.url) + + +class PydocUrlHandlerTest(PydocBaseTest): + """Tests for pydoc._url_handler""" + + def test_content_type_err(self): + f = pydoc._url_handler + self.assertRaises(TypeError, f, 'A', '') + self.assertRaises(TypeError, f, 'B', 'foobar') + + def test_url_requests(self): + # Test for the correct title in the html pages returned. + # This tests the different parts of the URL handler without + # getting too picky about the exact html. 
+ requests = [ + ("", "Pydoc: Index of Modules"), + ("get?key=", "Pydoc: Index of Modules"), + ("index", "Pydoc: Index of Modules"), + ("topics", "Pydoc: Topics"), + ("keywords", "Pydoc: Keywords"), + ("pydoc", "Pydoc: module pydoc"), + ("get?key=pydoc", "Pydoc: module pydoc"), + ("search?key=pydoc", "Pydoc: Search Results"), + ("topic?key=def", "Pydoc: KEYWORD def"), + ("topic?key=STRINGS", "Pydoc: TOPIC STRINGS"), + ("foobar", "Pydoc: Error - foobar"), + ] + + with self.restrict_walk_packages(): + for url, title in requests: + self.call_url_handler(url, title) + + +class TestHelper(unittest.TestCase): + def test_keywords(self): + self.assertEqual(sorted(pydoc.Helper.keywords), + sorted(keyword.kwlist)) + + +class PydocWithMetaClasses(unittest.TestCase): + @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), + 'trace function introduces __locals__ unexpectedly') + @requires_docstrings + def test_DynamicClassAttribute(self): + class Meta(type): + def __getattr__(self, name): + if name == 'ham': + return 'spam' + return super().__getattr__(name) + class DA(metaclass=Meta): + @types.DynamicClassAttribute + def ham(self): + return 'eggs' + expected_text_data_docstrings = tuple('\n | ' + s if s else '' + for s in expected_data_docstrings) + output = StringIO() + helper = pydoc.Helper(output=output) + helper(DA) + expected_text = expected_dynamicattribute_pattern % ( + (__name__,) + expected_text_data_docstrings[:2]) + result = output.getvalue().strip() + self.assertEqual(expected_text, result) + + @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), + 'trace function introduces __locals__ unexpectedly') + @requires_docstrings + def test_virtualClassAttributeWithOneMeta(self): + class Meta(type): + def __dir__(cls): + return ['__class__', '__module__', '__name__', 'LIFE'] + def __getattr__(self, name): + if name =='LIFE': + return 42 + return super().__getattr__(name) + class Class(metaclass=Meta): + pass + output = StringIO() + helper = pydoc.Helper(output=output) + helper(Class) + expected_text = expected_virtualattribute_pattern1 % __name__ + result = output.getvalue().strip() + self.assertEqual(expected_text, result) + + @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), + 'trace function introduces __locals__ unexpectedly') + @requires_docstrings + def test_virtualClassAttributeWithTwoMeta(self): + class Meta1(type): + def __dir__(cls): + return ['__class__', '__module__', '__name__', 'one'] + def __getattr__(self, name): + if name =='one': + return 1 + return super().__getattr__(name) + class Meta2(type): + def __dir__(cls): + return ['__class__', '__module__', '__name__', 'two'] + def __getattr__(self, name): + if name =='two': + return 2 + return super().__getattr__(name) + class Meta3(Meta1, Meta2): + def __dir__(cls): + return list(sorted(set( + ['__class__', '__module__', '__name__', 'three'] + + Meta1.__dir__(cls) + Meta2.__dir__(cls)))) + def __getattr__(self, name): + if name =='three': + return 3 + return super().__getattr__(name) + class Class1(metaclass=Meta1): + pass + class Class2(Class1, metaclass=Meta3): + pass + output = StringIO() + helper = pydoc.Helper(output=output) + helper(Class1) + expected_text1 = expected_virtualattribute_pattern2 % __name__ + result1 = output.getvalue().strip() + self.assertEqual(expected_text1, result1) + output = StringIO() + helper = pydoc.Helper(output=output) + helper(Class2) + expected_text2 = expected_virtualattribute_pattern3 % __name__ + result2 = output.getvalue().strip() + self.assertEqual(expected_text2,
result2) + + @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), + 'trace function introduces __locals__ unexpectedly') + @requires_docstrings + def test_buggy_dir(self): + class M(type): + def __dir__(cls): + return ['__class__', '__name__', 'missing', 'here'] + class C(metaclass=M): + here = 'present!' + output = StringIO() + helper = pydoc.Helper(output=output) + helper(C) + expected_text = expected_missingattribute_pattern % __name__ + result = output.getvalue().strip() + self.assertEqual(expected_text, result) + + def test_resolve_false(self): + # Issue #23008: pydoc enum.{,Int}Enum failed + # because bool(enum.Enum) is False. + with captured_stdout() as help_io: + pydoc.help('enum.Enum') + helptext = help_io.getvalue() + self.assertIn('class Enum', helptext) + + +class TestInternalUtilities(unittest.TestCase): + + def setUp(self): + tmpdir = tempfile.TemporaryDirectory() + self.argv0dir = tmpdir.name + self.argv0 = os.path.join(tmpdir.name, "nonexistent") + self.addCleanup(tmpdir.cleanup) + self.abs_curdir = abs_curdir = os.getcwd() + self.curdir_spellings = ["", os.curdir, abs_curdir] + + def _get_revised_path(self, given_path, argv0=None): + # Checking that pydoc.cli() actually calls pydoc._get_revised_path() + # is handled via code review (at least for now). + if argv0 is None: + argv0 = self.argv0 + return pydoc._get_revised_path(given_path, argv0) + + def _get_starting_path(self): + # Get a copy of sys.path without the current directory. + clean_path = sys.path.copy() + for spelling in self.curdir_spellings: + for __ in range(clean_path.count(spelling)): + clean_path.remove(spelling) + return clean_path + + def test_sys_path_adjustment_adds_missing_curdir(self): + clean_path = self._get_starting_path() + expected_path = [self.abs_curdir] + clean_path + self.assertEqual(self._get_revised_path(clean_path), expected_path) + + def test_sys_path_adjustment_removes_argv0_dir(self): + clean_path = self._get_starting_path() + expected_path = [self.abs_curdir] + clean_path + leading_argv0dir = [self.argv0dir] + clean_path + self.assertEqual(self._get_revised_path(leading_argv0dir), expected_path) + trailing_argv0dir = clean_path + [self.argv0dir] + self.assertEqual(self._get_revised_path(trailing_argv0dir), expected_path) + + def test_sys_path_adjustment_protects_pydoc_dir(self): + def _get_revised_path(given_path): + return self._get_revised_path(given_path, argv0=pydoc.__file__) + clean_path = self._get_starting_path() + leading_argv0dir = [self.argv0dir] + clean_path + expected_path = [self.abs_curdir] + leading_argv0dir + self.assertEqual(_get_revised_path(leading_argv0dir), expected_path) + trailing_argv0dir = clean_path + [self.argv0dir] + expected_path = [self.abs_curdir] + trailing_argv0dir + self.assertEqual(_get_revised_path(trailing_argv0dir), expected_path) + + def test_sys_path_adjustment_when_curdir_already_included(self): + clean_path = self._get_starting_path() + for spelling in self.curdir_spellings: + with self.subTest(curdir_spelling=spelling): + # If curdir is already present, no alterations are made at all + leading_curdir = [spelling] + clean_path + self.assertIsNone(self._get_revised_path(leading_curdir)) + trailing_curdir = clean_path + [spelling] + self.assertIsNone(self._get_revised_path(trailing_curdir)) + leading_argv0dir = [self.argv0dir] + leading_curdir + self.assertIsNone(self._get_revised_path(leading_argv0dir)) + trailing_argv0dir = trailing_curdir + [self.argv0dir] + self.assertIsNone(self._get_revised_path(trailing_argv0dir)) + + +def 
setUpModule(): + thread_info = threading_helper.threading_setup() + unittest.addModuleCleanup(threading_helper.threading_cleanup, *thread_info) + unittest.addModuleCleanup(reap_children) + + +if __name__ == "__main__": + unittest.main() diff -Nru python3.11-3.11.8/Lib/test/test_pydoc.py python3.11-3.11.9/Lib/test/test_pydoc.py --- python3.11-3.11.8/Lib/test/test_pydoc.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_pydoc.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1644 +0,0 @@ -import os -import sys -import contextlib -import importlib.util -import inspect -import pydoc -import py_compile -import keyword -import _pickle -import pkgutil -import re -import stat -import tempfile -import test.support -import types -import typing -import unittest -import urllib.parse -import xml.etree -import xml.etree.ElementTree -import textwrap -from io import StringIO -from collections import namedtuple -from urllib.request import urlopen, urlcleanup -from test.support import import_helper -from test.support import os_helper -from test.support.script_helper import (assert_python_ok, - assert_python_failure, spawn_python) -from test.support import threading_helper -from test.support import (reap_children, captured_output, captured_stdout, - captured_stderr, is_emscripten, is_wasi, - requires_docstrings, MISSING_C_DOCSTRINGS) -from test.support.os_helper import (TESTFN, rmtree, unlink) -from test import pydoc_mod - - -class nonascii: - 'Це не латиниця' - pass - -if test.support.HAVE_DOCSTRINGS: - expected_data_docstrings = ( - 'dictionary for instance variables', - 'list of weak references to the object', - ) * 2 -else: - expected_data_docstrings = ('', '', '', '') - -expected_text_pattern = """ -NAME - test.pydoc_mod - This is a test module for test_pydoc -%s -CLASSES - builtins.object - A - B - C -\x20\x20\x20\x20 - class A(builtins.object) - | Hello and goodbye - |\x20\x20 - | Methods defined here: - |\x20\x20 - | __init__() - | Wow, I have no function! 
- |\x20\x20 - | ---------------------------------------------------------------------- - | Data descriptors defined here: - |\x20\x20 - | __dict__%s - |\x20\x20 - | __weakref__%s -\x20\x20\x20\x20 - class B(builtins.object) - | Data descriptors defined here: - |\x20\x20 - | __dict__%s - |\x20\x20 - | __weakref__%s - |\x20\x20 - | ---------------------------------------------------------------------- - | Data and other attributes defined here: - |\x20\x20 - | NO_MEANING = 'eggs' - |\x20\x20 - | __annotations__ = {'NO_MEANING': <class 'str'>} -\x20\x20\x20\x20 - class C(builtins.object) - | Methods defined here: - |\x20\x20 - | get_answer(self) - | Return say_no() - |\x20\x20 - | is_it_true(self) - | Return self.get_answer() - |\x20\x20 - | say_no(self) - |\x20\x20 - | ---------------------------------------------------------------------- - | Class methods defined here: - |\x20\x20 - | __class_getitem__(item) from builtins.type - |\x20\x20 - | ---------------------------------------------------------------------- - | Data descriptors defined here: - |\x20\x20 - | __dict__ - | dictionary for instance variables - |\x20\x20 - | __weakref__ - | list of weak references to the object - -FUNCTIONS - doc_func() - This function solves all of the world's problems: - hunger - lack of Python - war -\x20\x20\x20\x20 - nodoc_func() - -DATA - __xyz__ = 'X, Y and Z' - c_alias = test.pydoc_mod.C[int] - list_alias1 = typing.List[int] - list_alias2 = list[int] - type_union1 = typing.Union[int, str] - type_union2 = int | str - -VERSION - 1.2.3.4 - -AUTHOR - Benjamin Peterson - -CREDITS - Nobody - -FILE - %s -""".strip() - -expected_text_data_docstrings = tuple('\n | ' + s if s else '' - for s in expected_data_docstrings) - -html2text_of_expected = """ -test.pydoc_mod (version 1.2.3.4) -This is a test module for test_pydoc - -Modules - types - typing - -Classes - builtins.object - A - B - C - -class A(builtins.object) - Hello and goodbye - - Methods defined here: - __init__() - Wow, I have no function! - - Data descriptors defined here: - __dict__ - dictionary for instance variables - __weakref__ - list of weak references to the object - -class B(builtins.object) - Data descriptors defined here: - __dict__ - dictionary for instance variables - __weakref__ - list of weak references to the object - Data and other attributes defined here: - NO_MEANING = 'eggs' - __annotations__ = {'NO_MEANING': <class 'str'>} - - -class C(builtins.object) - Methods defined here: - get_answer(self) - Return say_no() - is_it_true(self) - Return self.get_answer() - say_no(self) - Class methods defined here: - __class_getitem__(item) from builtins.type - Data descriptors defined here: - __dict__ - dictionary for instance variables - __weakref__ - list of weak references to the object - -Functions - doc_func() - This function solves all of the world's problems: - hunger - lack of Python - war - nodoc_func() - -Data - __xyz__ = 'X, Y and Z' - c_alias = test.pydoc_mod.C[int] - list_alias1 = typing.List[int] - list_alias2 = list[int] - type_union1 = typing.Union[int, str] - type_union2 = int | str - -Author - Benjamin Peterson - -Credits - Nobody -""" - -expected_html_data_docstrings = tuple(s.replace(' ', '&nbsp;') - for s in expected_data_docstrings) - -# output pattern for missing module -missing_pattern = '''\ -No Python documentation found for %r. -Use help() to get the interactive help utility.
-Use help(str) for help on the str class.'''.replace('\n', os.linesep) - -# output pattern for module with bad imports -badimport_pattern = "problem in %s - ModuleNotFoundError: No module named %r" - -expected_dynamicattribute_pattern = """ -Help on class DA in module %s: - -class DA(builtins.object) - | Data descriptors defined here: - |\x20\x20 - | __dict__%s - |\x20\x20 - | __weakref__%s - |\x20\x20 - | ham - |\x20\x20 - | ---------------------------------------------------------------------- - | Data and other attributes inherited from Meta: - |\x20\x20 - | ham = 'spam' -""".strip() - -expected_virtualattribute_pattern1 = """ -Help on class Class in module %s: - -class Class(builtins.object) - | Data and other attributes inherited from Meta: - |\x20\x20 - | LIFE = 42 -""".strip() - -expected_virtualattribute_pattern2 = """ -Help on class Class1 in module %s: - -class Class1(builtins.object) - | Data and other attributes inherited from Meta1: - |\x20\x20 - | one = 1 -""".strip() - -expected_virtualattribute_pattern3 = """ -Help on class Class2 in module %s: - -class Class2(Class1) - | Method resolution order: - | Class2 - | Class1 - | builtins.object - |\x20\x20 - | Data and other attributes inherited from Meta1: - |\x20\x20 - | one = 1 - |\x20\x20 - | ---------------------------------------------------------------------- - | Data and other attributes inherited from Meta3: - |\x20\x20 - | three = 3 - |\x20\x20 - | ---------------------------------------------------------------------- - | Data and other attributes inherited from Meta2: - |\x20\x20 - | two = 2 -""".strip() - -expected_missingattribute_pattern = """ -Help on class C in module %s: - -class C(builtins.object) - | Data and other attributes defined here: - |\x20\x20 - | here = 'present!' -""".strip() - -def run_pydoc(module_name, *args, **env): - """ - Runs pydoc on the specified module. Returns the stripped - output of pydoc. - """ - args = args + (module_name,) - # do not write bytecode files to avoid caching errors - rc, out, err = assert_python_ok('-B', pydoc.__file__, *args, **env) - return out.strip() - -def run_pydoc_fail(module_name, *args, **env): - """ - Runs pydoc on the specified module expecting a failure. - """ - args = args + (module_name,) - rc, out, err = assert_python_failure('-B', pydoc.__file__, *args, **env) - return out.strip() - -def get_pydoc_html(module): - "Returns pydoc generated output as html" - doc = pydoc.HTMLDoc() - output = doc.docmodule(module) - loc = doc.getdocloc(pydoc_mod) or "" - if loc: - loc = "
Module Docs" - return output.strip(), loc - -def get_pydoc_link(module): - "Returns a documentation web link of a module" - abspath = os.path.abspath - dirname = os.path.dirname - basedir = dirname(dirname(abspath(__file__))) - doc = pydoc.TextDoc() - loc = doc.getdocloc(module, basedir=basedir) - return loc - -def get_pydoc_text(module): - "Returns pydoc generated output as text" - doc = pydoc.TextDoc() - loc = doc.getdocloc(pydoc_mod) or "" - if loc: - loc = "\nMODULE DOCS\n " + loc + "\n" - - output = doc.docmodule(module) - - # clean up the extra text formatting that pydoc performs - patt = re.compile('\b.') - output = patt.sub('', output) - return output.strip(), loc - -def get_html_title(text): - # Bit of hack, but good enough for test purposes - header, _, _ = text.partition("") - _, _, title = header.partition("") - title, _, _ = title.partition("") - return title - - -def html2text(html): - """A quick and dirty implementation of html2text. - - Tailored for pydoc tests only. - """ - html = html.replace("
", "\n") - html = re.sub("<.*?>", "", html) - html = pydoc.replace(html, " ", " ", ">", ">", "<", "<") - return html - - -class PydocBaseTest(unittest.TestCase): - - def _restricted_walk_packages(self, walk_packages, path=None): - """ - A version of pkgutil.walk_packages() that will restrict itself to - a given path. - """ - default_path = path or [os.path.dirname(__file__)] - def wrapper(path=None, prefix='', onerror=None): - return walk_packages(path or default_path, prefix, onerror) - return wrapper - - @contextlib.contextmanager - def restrict_walk_packages(self, path=None): - walk_packages = pkgutil.walk_packages - pkgutil.walk_packages = self._restricted_walk_packages(walk_packages, - path) - try: - yield - finally: - pkgutil.walk_packages = walk_packages - - def call_url_handler(self, url, expected_title): - text = pydoc._url_handler(url, "text/html") - result = get_html_title(text) - # Check the title to ensure an unexpected error page was not returned - self.assertEqual(result, expected_title, text) - return text - - -class PydocDocTest(unittest.TestCase): - maxDiff = None - - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') - @requires_docstrings - def test_html_doc(self): - result, doc_loc = get_pydoc_html(pydoc_mod) - text_result = html2text(result) - text_lines = [line.strip() for line in text_result.splitlines()] - text_lines = [line for line in text_lines if line] - del text_lines[1] - expected_lines = html2text_of_expected.splitlines() - expected_lines = [line.strip() for line in expected_lines if line] - self.assertEqual(text_lines, expected_lines) - mod_file = inspect.getabsfile(pydoc_mod) - mod_url = urllib.parse.quote(mod_file) - self.assertIn(mod_url, result) - self.assertIn(mod_file, result) - self.assertIn(doc_loc, result) - - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') - @requires_docstrings - def test_text_doc(self): - result, doc_loc = get_pydoc_text(pydoc_mod) - expected_text = expected_text_pattern % ( - (doc_loc,) + - expected_text_data_docstrings + - (inspect.getabsfile(pydoc_mod),)) - self.assertEqual(expected_text, result) - - def test_text_enum_member_with_value_zero(self): - # Test issue #20654 to ensure enum member with value 0 can be - # displayed. It used to throw KeyError: 'zero'. - import enum - class BinaryInteger(enum.IntEnum): - zero = 0 - one = 1 - doc = pydoc.render_doc(BinaryInteger) - self.assertIn('BinaryInteger.zero', doc) - - def test_mixed_case_module_names_are_lower_cased(self): - # issue16484 - doc_link = get_pydoc_link(xml.etree.ElementTree) - self.assertIn('xml.etree.elementtree', doc_link) - - def test_issue8225(self): - # Test issue8225 to ensure no doc link appears for xml.etree - result, doc_loc = get_pydoc_text(xml.etree) - self.assertEqual(doc_loc, "", "MODULE DOCS incorrectly includes a link") - - def test_getpager_with_stdin_none(self): - previous_stdin = sys.stdin - try: - sys.stdin = None - pydoc.getpager() # Shouldn't fail. 
- finally: - sys.stdin = previous_stdin - - def test_non_str_name(self): - # issue14638 - # Treat illegal (non-str) name like no name - - class A: - __name__ = 42 - class B: - pass - adoc = pydoc.render_doc(A()) - bdoc = pydoc.render_doc(B()) - self.assertEqual(adoc.replace("A", "B"), bdoc) - - def test_not_here(self): - missing_module = "test.i_am_not_here" - result = str(run_pydoc_fail(missing_module), 'ascii') - expected = missing_pattern % missing_module - self.assertEqual(expected, result, - "documentation for missing module found") - - @requires_docstrings - def test_not_ascii(self): - result = run_pydoc('test.test_pydoc.nonascii', PYTHONIOENCODING='ascii') - encoded = nonascii.__doc__.encode('ascii', 'backslashreplace') - self.assertIn(encoded, result) - - def test_input_strip(self): - missing_module = " test.i_am_not_here " - result = str(run_pydoc_fail(missing_module), 'ascii') - expected = missing_pattern % missing_module.strip() - self.assertEqual(expected, result) - - def test_stripid(self): - # test with strings, other implementations might have different repr() - stripid = pydoc.stripid - # strip the id - self.assertEqual(stripid(''), - '') - self.assertEqual(stripid(''), - '') - # nothing to strip, return the same text - self.assertEqual(stripid('42'), '42') - self.assertEqual(stripid(""), - "") - - def test_builtin_with_more_than_four_children(self): - """Tests help on builtin object which have more than four child classes. - - When running help() on a builtin class which has child classes, it - should contain a "Built-in subclasses" section and only 4 classes - should be displayed with a hint on how many more subclasses are present. - For example: - - >>> help(object) - Help on class object in module builtins: - - class object - | The most base type - | - | Built-in subclasses: - | async_generator - | BaseException - | builtin_function_or_method - | bytearray - | ... and 82 other subclasses - """ - doc = pydoc.TextDoc() - text = doc.docclass(object) - snip = (" | Built-in subclasses:\n" - " | async_generator\n" - " | BaseException\n" - " | builtin_function_or_method\n" - " | bytearray\n" - " | ... and \\d+ other subclasses") - self.assertRegex(text, snip) - - def test_builtin_with_child(self): - """Tests help on builtin object which have only child classes. - - When running help() on a builtin class which has child classes, it - should contain a "Built-in subclasses" section. For example: - - >>> help(ArithmeticError) - Help on class ArithmeticError in module builtins: - - class ArithmeticError(Exception) - | Base class for arithmetic errors. - | - ... - | - | Built-in subclasses: - | FloatingPointError - | OverflowError - | ZeroDivisionError - """ - doc = pydoc.TextDoc() - text = doc.docclass(ArithmeticError) - snip = (" | Built-in subclasses:\n" - " | FloatingPointError\n" - " | OverflowError\n" - " | ZeroDivisionError") - self.assertIn(snip, text) - - def test_builtin_with_grandchild(self): - """Tests help on builtin classes which have grandchild classes. - - When running help() on a builtin class which has child classes, it - should contain a "Built-in subclasses" section. However, if it also has - grandchildren, these should not show up on the subclasses section. - For example: - - >>> help(Exception) - Help on class Exception in module builtins: - - class Exception(BaseException) - | Common base class for all non-exit exceptions. - | - ... - | - | Built-in subclasses: - | ArithmeticError - | AssertionError - | AttributeError - ... 
- """ - doc = pydoc.TextDoc() - text = doc.docclass(Exception) - snip = (" | Built-in subclasses:\n" - " | ArithmeticError\n" - " | AssertionError\n" - " | AttributeError") - self.assertIn(snip, text) - # Testing that the grandchild ZeroDivisionError does not show up - self.assertNotIn('ZeroDivisionError', text) - - def test_builtin_no_child(self): - """Tests help on builtin object which have no child classes. - - When running help() on a builtin class which has no child classes, it - should not contain any "Built-in subclasses" section. For example: - - >>> help(ZeroDivisionError) - - Help on class ZeroDivisionError in module builtins: - - class ZeroDivisionError(ArithmeticError) - | Second argument to a division or modulo operation was zero. - | - | Method resolution order: - | ZeroDivisionError - | ArithmeticError - | Exception - | BaseException - | object - | - | Methods defined here: - ... - """ - doc = pydoc.TextDoc() - text = doc.docclass(ZeroDivisionError) - # Testing that the subclasses section does not appear - self.assertNotIn('Built-in subclasses', text) - - def test_builtin_on_metaclasses(self): - """Tests help on metaclasses. - - When running help() on a metaclasses such as type, it - should not contain any "Built-in subclasses" section. - """ - doc = pydoc.TextDoc() - text = doc.docclass(type) - # Testing that the subclasses section does not appear - self.assertNotIn('Built-in subclasses', text) - - def test_fail_help_cli(self): - elines = (missing_pattern % 'abd').splitlines() - with spawn_python("-c" "help()") as proc: - out, _ = proc.communicate(b"abd") - olines = out.decode().splitlines()[-9:-6] - olines[0] = olines[0].removeprefix('help> ') - self.assertEqual(elines, olines) - - def test_fail_help_output_redirect(self): - with StringIO() as buf: - helper = pydoc.Helper(output=buf) - helper.help("abd") - expected = missing_pattern % "abd" - self.assertEqual(expected, buf.getvalue().strip().replace('\n', os.linesep)) - - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') - @requires_docstrings - def test_help_output_redirect(self): - # issue 940286, if output is set in Helper, then all output from - # Helper.help should be redirected - getpager_old = pydoc.getpager - getpager_new = lambda: (lambda x: x) - self.maxDiff = None - - buf = StringIO() - helper = pydoc.Helper(output=buf) - unused, doc_loc = get_pydoc_text(pydoc_mod) - module = "test.pydoc_mod" - help_header = """ - Help on module test.pydoc_mod in test: - - """.lstrip() - help_header = textwrap.dedent(help_header) - expected_help_pattern = help_header + expected_text_pattern - - pydoc.getpager = getpager_new - try: - with captured_output('stdout') as output, \ - captured_output('stderr') as err: - helper.help(module) - result = buf.getvalue().strip() - expected_text = expected_help_pattern % ( - (doc_loc,) + - expected_text_data_docstrings + - (inspect.getabsfile(pydoc_mod),)) - self.assertEqual('', output.getvalue()) - self.assertEqual('', err.getvalue()) - self.assertEqual(expected_text, result) - finally: - pydoc.getpager = getpager_old - - def test_namedtuple_fields(self): - Person = namedtuple('Person', ['nickname', 'firstname']) - with captured_stdout() as help_io: - pydoc.help(Person) - helptext = help_io.getvalue() - self.assertIn("nickname", helptext) - self.assertIn("firstname", helptext) - self.assertIn("Alias for field number 0", helptext) - self.assertIn("Alias for field number 1", helptext) - - def 
test_namedtuple_public_underscore(self): - NT = namedtuple('NT', ['abc', 'def'], rename=True) - with captured_stdout() as help_io: - pydoc.help(NT) - helptext = help_io.getvalue() - self.assertIn('_1', helptext) - self.assertIn('_replace', helptext) - self.assertIn('_asdict', helptext) - - def test_synopsis(self): - self.addCleanup(unlink, TESTFN) - for encoding in ('ISO-8859-1', 'UTF-8'): - with open(TESTFN, 'w', encoding=encoding) as script: - if encoding != 'UTF-8': - print('#coding: {}'.format(encoding), file=script) - print('"""line 1: h\xe9', file=script) - print('line 2: hi"""', file=script) - synopsis = pydoc.synopsis(TESTFN, {}) - self.assertEqual(synopsis, 'line 1: h\xe9') - - @requires_docstrings - def test_synopsis_sourceless(self): - os = import_helper.import_fresh_module('os') - expected = os.__doc__.splitlines()[0] - filename = os.__cached__ - synopsis = pydoc.synopsis(filename) - - self.assertEqual(synopsis, expected) - - def test_synopsis_sourceless_empty_doc(self): - with os_helper.temp_cwd() as test_dir: - init_path = os.path.join(test_dir, 'foomod42.py') - cached_path = importlib.util.cache_from_source(init_path) - with open(init_path, 'w') as fobj: - fobj.write("foo = 1") - py_compile.compile(init_path) - synopsis = pydoc.synopsis(init_path, {}) - self.assertIsNone(synopsis) - synopsis_cached = pydoc.synopsis(cached_path, {}) - self.assertIsNone(synopsis_cached) - - def test_splitdoc_with_description(self): - example_string = "I Am A Doc\n\n\nHere is my description" - self.assertEqual(pydoc.splitdoc(example_string), - ('I Am A Doc', '\nHere is my description')) - - def test_is_package_when_not_package(self): - with os_helper.temp_cwd() as test_dir: - self.assertFalse(pydoc.ispackage(test_dir)) - - def test_is_package_when_is_package(self): - with os_helper.temp_cwd() as test_dir: - init_path = os.path.join(test_dir, '__init__.py') - open(init_path, 'w').close() - self.assertTrue(pydoc.ispackage(test_dir)) - os.remove(init_path) - - def test_allmethods(self): - # issue 17476: allmethods was no longer returning unbound methods. - # This test is a bit fragile in the face of changes to object and type, - # but I can't think of a better way to do it without duplicating the - # logic of the function under test. - - class TestClass(object): - def method_returning_true(self): - return True - - # What we expect to get back: everything on object... - expected = dict(vars(object)) - # ...plus our unbound method... - expected['method_returning_true'] = TestClass.method_returning_true - # ...but not the non-methods on object. - del expected['__doc__'] - del expected['__class__'] - # inspect resolves descriptors on type into methods, but vars doesn't, - # so we need to update __subclasshook__ and __init_subclass__. 
- expected['__subclasshook__'] = TestClass.__subclasshook__ - expected['__init_subclass__'] = TestClass.__init_subclass__ - - methods = pydoc.allmethods(TestClass) - self.assertDictEqual(methods, expected) - - @requires_docstrings - def test_method_aliases(self): - class A: - def tkraise(self, aboveThis=None): - """Raise this widget in the stacking order.""" - lift = tkraise - def a_size(self): - """Return size""" - class B(A): - def itemconfigure(self, tagOrId, cnf=None, **kw): - """Configure resources of an item TAGORID.""" - itemconfig = itemconfigure - b_size = A.a_size - - doc = pydoc.render_doc(B) - # clean up the extra text formatting that pydoc performs - doc = re.sub('\b.', '', doc) - self.assertEqual(doc, '''\ -Python Library Documentation: class B in module %s - -class B(A) - | Method resolution order: - | B - | A - | builtins.object - |\x20\x20 - | Methods defined here: - |\x20\x20 - | b_size = a_size(self) - |\x20\x20 - | itemconfig = itemconfigure(self, tagOrId, cnf=None, **kw) - |\x20\x20 - | itemconfigure(self, tagOrId, cnf=None, **kw) - | Configure resources of an item TAGORID. - |\x20\x20 - | ---------------------------------------------------------------------- - | Methods inherited from A: - |\x20\x20 - | a_size(self) - | Return size - |\x20\x20 - | lift = tkraise(self, aboveThis=None) - |\x20\x20 - | tkraise(self, aboveThis=None) - | Raise this widget in the stacking order. - |\x20\x20 - | ---------------------------------------------------------------------- - | Data descriptors inherited from A: - |\x20\x20 - | __dict__ - | dictionary for instance variables - |\x20\x20 - | __weakref__ - | list of weak references to the object -''' % __name__) - - doc = pydoc.render_doc(B, renderer=pydoc.HTMLDoc()) - expected_text = f""" -Python Library Documentation - -class B in module {__name__} -class B(A) - Method resolution order: - B - A - builtins.object - - Methods defined here: - b_size = a_size(self) - itemconfig = itemconfigure(self, tagOrId, cnf=None, **kw) - itemconfigure(self, tagOrId, cnf=None, **kw) - Configure resources of an item TAGORID. - - Methods inherited from A: - a_size(self) - Return size - lift = tkraise(self, aboveThis=None) - tkraise(self, aboveThis=None) - Raise this widget in the stacking order. 
- - Data descriptors inherited from A: - __dict__ - dictionary for instance variables - __weakref__ - list of weak references to the object -""" - as_text = html2text(doc) - expected_lines = [line.strip() for line in expected_text.split("\n") if line] - for expected_line in expected_lines: - self.assertIn(expected_line, as_text) - - def test__future__imports(self): - # __future__ features are excluded from module help, - # except when it's the __future__ module itself - import __future__ - future_text, _ = get_pydoc_text(__future__) - future_html, _ = get_pydoc_html(__future__) - pydoc_mod_text, _ = get_pydoc_text(pydoc_mod) - pydoc_mod_html, _ = get_pydoc_html(pydoc_mod) - - for feature in __future__.all_feature_names: - txt = f"{feature} = _Feature" - html = f"{feature} = _Feature" - self.assertIn(txt, future_text) - self.assertIn(html, future_html) - self.assertNotIn(txt, pydoc_mod_text) - self.assertNotIn(html, pydoc_mod_html) - - -class PydocImportTest(PydocBaseTest): - - def setUp(self): - self.test_dir = os.mkdir(TESTFN) - self.addCleanup(rmtree, TESTFN) - importlib.invalidate_caches() - - def test_badimport(self): - # This tests the fix for issue 5230, where if pydoc found the module - # but the module had an internal import error pydoc would report no doc - # found. - modname = 'testmod_xyzzy' - testpairs = ( - ('i_am_not_here', 'i_am_not_here'), - ('test.i_am_not_here_either', 'test.i_am_not_here_either'), - ('test.i_am_not_here.neither_am_i', 'test.i_am_not_here'), - ('i_am_not_here.{}'.format(modname), 'i_am_not_here'), - ('test.{}'.format(modname), 'test.{}'.format(modname)), - ) - - sourcefn = os.path.join(TESTFN, modname) + os.extsep + "py" - for importstring, expectedinmsg in testpairs: - with open(sourcefn, 'w') as f: - f.write("import {}\n".format(importstring)) - result = run_pydoc_fail(modname, PYTHONPATH=TESTFN).decode("ascii") - expected = badimport_pattern % (modname, expectedinmsg) - self.assertEqual(expected, result) - - def test_apropos_with_bad_package(self): - # Issue 7425 - pydoc -k failed when bad package on path - pkgdir = os.path.join(TESTFN, "syntaxerr") - os.mkdir(pkgdir) - badsyntax = os.path.join(pkgdir, "__init__") + os.extsep + "py" - with open(badsyntax, 'w') as f: - f.write("invalid python syntax = $1\n") - with self.restrict_walk_packages(path=[TESTFN]): - with captured_stdout() as out: - with captured_stderr() as err: - pydoc.apropos('xyzzy') - # No result, no error - self.assertEqual(out.getvalue(), '') - self.assertEqual(err.getvalue(), '') - # The package name is still matched - with captured_stdout() as out: - with captured_stderr() as err: - pydoc.apropos('syntaxerr') - self.assertEqual(out.getvalue().strip(), 'syntaxerr') - self.assertEqual(err.getvalue(), '') - - def test_apropos_with_unreadable_dir(self): - # Issue 7367 - pydoc -k failed when unreadable dir on path - self.unreadable_dir = os.path.join(TESTFN, "unreadable") - os.mkdir(self.unreadable_dir, 0) - self.addCleanup(os.rmdir, self.unreadable_dir) - # Note, on Windows the directory appears to be still - # readable so this is not really testing the issue there - with self.restrict_walk_packages(path=[TESTFN]): - with captured_stdout() as out: - with captured_stderr() as err: - pydoc.apropos('SOMEKEY') - # No result, no error - self.assertEqual(out.getvalue(), '') - self.assertEqual(err.getvalue(), '') - - @os_helper.skip_unless_working_chmod - @unittest.skipIf(is_emscripten, "cannot remove x bit") - def test_apropos_empty_doc(self): - pkgdir = os.path.join(TESTFN, 'walkpkg') - 
os.mkdir(pkgdir) - self.addCleanup(rmtree, pkgdir) - init_path = os.path.join(pkgdir, '__init__.py') - with open(init_path, 'w') as fobj: - fobj.write("foo = 1") - current_mode = stat.S_IMODE(os.stat(pkgdir).st_mode) - try: - os.chmod(pkgdir, current_mode & ~stat.S_IEXEC) - with self.restrict_walk_packages(path=[TESTFN]), captured_stdout() as stdout: - pydoc.apropos('') - self.assertIn('walkpkg', stdout.getvalue()) - finally: - os.chmod(pkgdir, current_mode) - - def test_url_search_package_error(self): - # URL handler search should cope with packages that raise exceptions - pkgdir = os.path.join(TESTFN, "test_error_package") - os.mkdir(pkgdir) - init = os.path.join(pkgdir, "__init__.py") - with open(init, "wt", encoding="ascii") as f: - f.write("""raise ValueError("ouch")\n""") - with self.restrict_walk_packages(path=[TESTFN]): - # Package has to be importable for the error to have any effect - saved_paths = tuple(sys.path) - sys.path.insert(0, TESTFN) - try: - with self.assertRaisesRegex(ValueError, "ouch"): - import test_error_package # Sanity check - - text = self.call_url_handler("search?key=test_error_package", - "Pydoc: Search Results") - found = ('' - 'test_error_package') - self.assertIn(found, text) - finally: - sys.path[:] = saved_paths - - @unittest.skip('causes undesirable side-effects (#20128)') - def test_modules(self): - # See Helper.listmodules(). - num_header_lines = 2 - num_module_lines_min = 5 # Playing it safe. - num_footer_lines = 3 - expected = num_header_lines + num_module_lines_min + num_footer_lines - - output = StringIO() - helper = pydoc.Helper(output=output) - helper('modules') - result = output.getvalue().strip() - num_lines = len(result.splitlines()) - - self.assertGreaterEqual(num_lines, expected) - - @unittest.skip('causes undesirable side-effects (#20128)') - def test_modules_search(self): - # See Helper.listmodules(). 
- expected = 'pydoc - ' - - output = StringIO() - helper = pydoc.Helper(output=output) - with captured_stdout() as help_io: - helper('modules pydoc') - result = help_io.getvalue() - - self.assertIn(expected, result) - - @unittest.skip('some buildbots are not cooperating (#20128)') - def test_modules_search_builtin(self): - expected = 'gc - ' - - output = StringIO() - helper = pydoc.Helper(output=output) - with captured_stdout() as help_io: - helper('modules garbage') - result = help_io.getvalue() - - self.assertTrue(result.startswith(expected)) - - def test_importfile(self): - loaded_pydoc = pydoc.importfile(pydoc.__file__) - - self.assertIsNot(loaded_pydoc, pydoc) - self.assertEqual(loaded_pydoc.__name__, 'pydoc') - self.assertEqual(loaded_pydoc.__file__, pydoc.__file__) - self.assertEqual(loaded_pydoc.__spec__, pydoc.__spec__) - - -class TestDescriptions(unittest.TestCase): - - def test_module(self): - # Check that pydocfodder module can be described - from test import pydocfodder - doc = pydoc.render_doc(pydocfodder) - self.assertIn("pydocfodder", doc) - - def test_class(self): - class C: "New-style class" - c = C() - - self.assertEqual(pydoc.describe(C), 'class C') - self.assertEqual(pydoc.describe(c), 'C') - expected = 'C in module %s object' % __name__ - self.assertIn(expected, pydoc.render_doc(c)) - - def test_generic_alias(self): - self.assertEqual(pydoc.describe(typing.List[int]), '_GenericAlias') - doc = pydoc.render_doc(typing.List[int], renderer=pydoc.plaintext) - self.assertIn('_GenericAlias in module typing', doc) - self.assertIn('List = class list(object)', doc) - if not MISSING_C_DOCSTRINGS: - self.assertIn(list.__doc__.strip().splitlines()[0], doc) - - self.assertEqual(pydoc.describe(list[int]), 'GenericAlias') - doc = pydoc.render_doc(list[int], renderer=pydoc.plaintext) - self.assertIn('GenericAlias in module builtins', doc) - self.assertIn('\nclass list(object)', doc) - if not MISSING_C_DOCSTRINGS: - self.assertIn(list.__doc__.strip().splitlines()[0], doc) - - def test_union_type(self): - self.assertEqual(pydoc.describe(typing.Union[int, str]), '_UnionGenericAlias') - doc = pydoc.render_doc(typing.Union[int, str], renderer=pydoc.plaintext) - self.assertIn('_UnionGenericAlias in module typing', doc) - self.assertIn('Union = typing.Union', doc) - if typing.Union.__doc__: - self.assertIn(typing.Union.__doc__.strip().splitlines()[0], doc) - - self.assertEqual(pydoc.describe(int | str), 'UnionType') - doc = pydoc.render_doc(int | str, renderer=pydoc.plaintext) - self.assertIn('UnionType in module types object', doc) - self.assertIn('\nclass UnionType(builtins.object)', doc) - if not MISSING_C_DOCSTRINGS: - self.assertIn(types.UnionType.__doc__.strip().splitlines()[0], doc) - - def test_special_form(self): - self.assertEqual(pydoc.describe(typing.NoReturn), '_SpecialForm') - doc = pydoc.render_doc(typing.NoReturn, renderer=pydoc.plaintext) - self.assertIn('_SpecialForm in module typing', doc) - if typing.NoReturn.__doc__: - self.assertIn('NoReturn = typing.NoReturn', doc) - self.assertIn(typing.NoReturn.__doc__.strip().splitlines()[0], doc) - else: - self.assertIn('NoReturn = class _SpecialForm(_Final)', doc) - - def test_typing_pydoc(self): - def foo(data: typing.List[typing.Any], - x: int) -> typing.Iterator[typing.Tuple[int, typing.Any]]: - ... - T = typing.TypeVar('T') - class C(typing.Generic[T], typing.Mapping[int, str]): ... 
- self.assertEqual(pydoc.render_doc(foo).splitlines()[-1], - 'f\x08fo\x08oo\x08o(data: List[Any], x: int)' - ' -> Iterator[Tuple[int, Any]]') - self.assertEqual(pydoc.render_doc(C).splitlines()[2], - 'class C\x08C(collections.abc.Mapping, typing.Generic)') - - def test_builtin(self): - for name in ('str', 'str.translate', 'builtins.str', - 'builtins.str.translate'): - # test low-level function - self.assertIsNotNone(pydoc.locate(name)) - # test high-level function - try: - pydoc.render_doc(name) - except ImportError: - self.fail('finding the doc of {!r} failed'.format(name)) - - for name in ('notbuiltins', 'strrr', 'strr.translate', - 'str.trrrranslate', 'builtins.strrr', - 'builtins.str.trrranslate'): - self.assertIsNone(pydoc.locate(name)) - self.assertRaises(ImportError, pydoc.render_doc, name) - - @staticmethod - def _get_summary_line(o): - text = pydoc.plain(pydoc.render_doc(o)) - lines = text.split('\n') - assert len(lines) >= 2 - return lines[2] - - @staticmethod - def _get_summary_lines(o): - text = pydoc.plain(pydoc.render_doc(o)) - lines = text.split('\n') - return '\n'.join(lines[2:]) - - # these should include "self" - def test_unbound_python_method(self): - self.assertEqual(self._get_summary_line(textwrap.TextWrapper.wrap), - "wrap(self, text)") - - @requires_docstrings - def test_unbound_builtin_method(self): - self.assertEqual(self._get_summary_line(_pickle.Pickler.dump), - "dump(self, obj, /)") - - # these no longer include "self" - def test_bound_python_method(self): - t = textwrap.TextWrapper() - self.assertEqual(self._get_summary_line(t.wrap), - "wrap(text) method of textwrap.TextWrapper instance") - def test_field_order_for_named_tuples(self): - Person = namedtuple('Person', ['nickname', 'firstname', 'agegroup']) - s = pydoc.render_doc(Person) - self.assertLess(s.index('nickname'), s.index('firstname')) - self.assertLess(s.index('firstname'), s.index('agegroup')) - - class NonIterableFields: - _fields = None - - class NonHashableFields: - _fields = [[]] - - # Make sure these doesn't fail - pydoc.render_doc(NonIterableFields) - pydoc.render_doc(NonHashableFields) - - @requires_docstrings - def test_bound_builtin_method(self): - s = StringIO() - p = _pickle.Pickler(s) - self.assertEqual(self._get_summary_line(p.dump), - "dump(obj, /) method of _pickle.Pickler instance") - - # this should *never* include self! - @requires_docstrings - def test_module_level_callable(self): - self.assertEqual(self._get_summary_line(os.stat), - "stat(path, *, dir_fd=None, follow_symlinks=True)") - - @requires_docstrings - def test_staticmethod(self): - class X: - @staticmethod - def sm(x, y): - '''A static method''' - ... - self.assertEqual(self._get_summary_lines(X.__dict__['sm']), - 'sm(x, y)\n' - ' A static method\n') - self.assertEqual(self._get_summary_lines(X.sm), """\ -sm(x, y) - A static method -""") - self.assertIn(""" - | Static methods defined here: - |\x20\x20 - | sm(x, y) - | A static method -""", pydoc.plain(pydoc.render_doc(X))) - - @requires_docstrings - def test_classmethod(self): - class X: - @classmethod - def cm(cls, x): - '''A class method''' - ... 
- self.assertEqual(self._get_summary_lines(X.__dict__['cm']), - 'cm(...)\n' - ' A class method\n') - self.assertEqual(self._get_summary_lines(X.cm), """\ -cm(x) method of builtins.type instance - A class method -""") - self.assertIn(""" - | Class methods defined here: - |\x20\x20 - | cm(x) from builtins.type - | A class method -""", pydoc.plain(pydoc.render_doc(X))) - - @requires_docstrings - def test_getset_descriptor(self): - # Currently these attributes are implemented as getset descriptors - # in CPython. - self.assertEqual(self._get_summary_line(int.numerator), "numerator") - self.assertEqual(self._get_summary_line(float.real), "real") - self.assertEqual(self._get_summary_line(Exception.args), "args") - self.assertEqual(self._get_summary_line(memoryview.obj), "obj") - - @requires_docstrings - def test_member_descriptor(self): - # Currently these attributes are implemented as member descriptors - # in CPython. - self.assertEqual(self._get_summary_line(complex.real), "real") - self.assertEqual(self._get_summary_line(range.start), "start") - self.assertEqual(self._get_summary_line(slice.start), "start") - self.assertEqual(self._get_summary_line(property.fget), "fget") - self.assertEqual(self._get_summary_line(StopIteration.value), "value") - - @requires_docstrings - def test_slot_descriptor(self): - class Point: - __slots__ = 'x', 'y' - self.assertEqual(self._get_summary_line(Point.x), "x") - - @requires_docstrings - def test_dict_attr_descriptor(self): - class NS: - pass - self.assertEqual(self._get_summary_line(NS.__dict__['__dict__']), - "__dict__") - - @requires_docstrings - def test_structseq_member_descriptor(self): - self.assertEqual(self._get_summary_line(type(sys.hash_info).width), - "width") - self.assertEqual(self._get_summary_line(type(sys.flags).debug), - "debug") - self.assertEqual(self._get_summary_line(type(sys.version_info).major), - "major") - self.assertEqual(self._get_summary_line(type(sys.float_info).max), - "max") - - @requires_docstrings - def test_namedtuple_field_descriptor(self): - Box = namedtuple('Box', ('width', 'height')) - self.assertEqual(self._get_summary_lines(Box.width), """\ - Alias for field number 0 -""") - - @requires_docstrings - def test_property(self): - class Rect: - @property - def area(self): - '''Area of the rect''' - return self.w * self.h - - self.assertEqual(self._get_summary_lines(Rect.area), """\ - Area of the rect -""") - self.assertIn(""" - | area - | Area of the rect -""", pydoc.plain(pydoc.render_doc(Rect))) - - @requires_docstrings - def test_custom_non_data_descriptor(self): - class Descr: - def __get__(self, obj, cls): - if obj is None: - return self - return 42 - class X: - attr = Descr() - - self.assertEqual(self._get_summary_lines(X.attr), f"""\ -<{__name__}.TestDescriptions.test_custom_non_data_descriptor..Descr object>""") - - X.attr.__doc__ = 'Custom descriptor' - self.assertEqual(self._get_summary_lines(X.attr), f"""\ -<{__name__}.TestDescriptions.test_custom_non_data_descriptor..Descr object> - Custom descriptor -""") - - X.attr.__name__ = 'foo' - self.assertEqual(self._get_summary_lines(X.attr), """\ -foo(...) 
- Custom descriptor -""") - - @requires_docstrings - def test_custom_data_descriptor(self): - class Descr: - def __get__(self, obj, cls): - if obj is None: - return self - return 42 - def __set__(self, obj, cls): - 1/0 - class X: - attr = Descr() - - self.assertEqual(self._get_summary_lines(X.attr), "") - - X.attr.__doc__ = 'Custom descriptor' - self.assertEqual(self._get_summary_lines(X.attr), """\ - Custom descriptor -""") - - X.attr.__name__ = 'foo' - self.assertEqual(self._get_summary_lines(X.attr), """\ -foo - Custom descriptor -""") - - def test_async_annotation(self): - async def coro_function(ign) -> int: - return 1 - - text = pydoc.plain(pydoc.plaintext.document(coro_function)) - self.assertIn('async coro_function', text) - - html = pydoc.HTMLDoc().document(coro_function) - self.assertIn( - 'async coro_function', - html) - - def test_async_generator_annotation(self): - async def an_async_generator(): - yield 1 - - text = pydoc.plain(pydoc.plaintext.document(an_async_generator)) - self.assertIn('async an_async_generator', text) - - html = pydoc.HTMLDoc().document(an_async_generator) - self.assertIn( - 'async an_async_generator', - html) - - @requires_docstrings - def test_html_for_https_links(self): - def a_fn_with_https_link(): - """a link https://localhost/""" - pass - - html = pydoc.HTMLDoc().document(a_fn_with_https_link) - self.assertIn( - 'https://localhost/', - html - ) - - -@unittest.skipIf( - is_emscripten or is_wasi, - "Socket server not available on Emscripten/WASI." -) -class PydocServerTest(unittest.TestCase): - """Tests for pydoc._start_server""" - - def test_server(self): - # Minimal test that starts the server, checks that it works, then stops - # it and checks its cleanup. - def my_url_handler(url, content_type): - text = 'the URL sent was: (%s, %s)' % (url, content_type) - return text - - serverthread = pydoc._start_server( - my_url_handler, - hostname='localhost', - port=0, - ) - self.assertEqual(serverthread.error, None) - self.assertTrue(serverthread.serving) - self.addCleanup( - lambda: serverthread.stop() if serverthread.serving else None - ) - self.assertIn('localhost', serverthread.url) - - self.addCleanup(urlcleanup) - self.assertEqual( - b'the URL sent was: (/test, text/html)', - urlopen(urllib.parse.urljoin(serverthread.url, '/test')).read(), - ) - self.assertEqual( - b'the URL sent was: (/test.css, text/css)', - urlopen(urllib.parse.urljoin(serverthread.url, '/test.css')).read(), - ) - - serverthread.stop() - self.assertFalse(serverthread.serving) - self.assertIsNone(serverthread.docserver) - self.assertIsNone(serverthread.url) - - -class PydocUrlHandlerTest(PydocBaseTest): - """Tests for pydoc._url_handler""" - - def test_content_type_err(self): - f = pydoc._url_handler - self.assertRaises(TypeError, f, 'A', '') - self.assertRaises(TypeError, f, 'B', 'foobar') - - def test_url_requests(self): - # Test for the correct title in the html pages returned. - # This tests the different parts of the URL handler without - # getting too picky about the exact html. 
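[Reviewer note] PydocServerTest above drives the private helper pydoc._start_server directly; the returned server thread exposes url, serving, error and stop(). A hedged usage sketch mirroring the test (this is a private API, so the details may change between releases):

import pydoc
import urllib.parse
from urllib.request import urlopen

def url_handler(url, content_type):
    # Called for every request; must return the response body as text.
    return 'the URL sent was: (%s, %s)' % (url, content_type)

thread = pydoc._start_server(url_handler, hostname='localhost', port=0)
try:
    body = urlopen(urllib.parse.urljoin(thread.url, '/test')).read()
    print(body)   # b'the URL sent was: (/test, text/html)'
finally:
    if thread.serving:
        thread.stop()
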
- requests = [ - ("", "Pydoc: Index of Modules"), - ("get?key=", "Pydoc: Index of Modules"), - ("index", "Pydoc: Index of Modules"), - ("topics", "Pydoc: Topics"), - ("keywords", "Pydoc: Keywords"), - ("pydoc", "Pydoc: module pydoc"), - ("get?key=pydoc", "Pydoc: module pydoc"), - ("search?key=pydoc", "Pydoc: Search Results"), - ("topic?key=def", "Pydoc: KEYWORD def"), - ("topic?key=STRINGS", "Pydoc: TOPIC STRINGS"), - ("foobar", "Pydoc: Error - foobar"), - ] - - with self.restrict_walk_packages(): - for url, title in requests: - self.call_url_handler(url, title) - - -class TestHelper(unittest.TestCase): - def test_keywords(self): - self.assertEqual(sorted(pydoc.Helper.keywords), - sorted(keyword.kwlist)) - - -class PydocWithMetaClasses(unittest.TestCase): - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') - @requires_docstrings - def test_DynamicClassAttribute(self): - class Meta(type): - def __getattr__(self, name): - if name == 'ham': - return 'spam' - return super().__getattr__(name) - class DA(metaclass=Meta): - @types.DynamicClassAttribute - def ham(self): - return 'eggs' - expected_text_data_docstrings = tuple('\n | ' + s if s else '' - for s in expected_data_docstrings) - output = StringIO() - helper = pydoc.Helper(output=output) - helper(DA) - expected_text = expected_dynamicattribute_pattern % ( - (__name__,) + expected_text_data_docstrings[:2]) - result = output.getvalue().strip() - self.assertEqual(expected_text, result) - - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') - @requires_docstrings - def test_virtualClassAttributeWithOneMeta(self): - class Meta(type): - def __dir__(cls): - return ['__class__', '__module__', '__name__', 'LIFE'] - def __getattr__(self, name): - if name =='LIFE': - return 42 - return super().__getattr(name) - class Class(metaclass=Meta): - pass - output = StringIO() - helper = pydoc.Helper(output=output) - helper(Class) - expected_text = expected_virtualattribute_pattern1 % __name__ - result = output.getvalue().strip() - self.assertEqual(expected_text, result) - - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') - @requires_docstrings - def test_virtualClassAttributeWithTwoMeta(self): - class Meta1(type): - def __dir__(cls): - return ['__class__', '__module__', '__name__', 'one'] - def __getattr__(self, name): - if name =='one': - return 1 - return super().__getattr__(name) - class Meta2(type): - def __dir__(cls): - return ['__class__', '__module__', '__name__', 'two'] - def __getattr__(self, name): - if name =='two': - return 2 - return super().__getattr__(name) - class Meta3(Meta1, Meta2): - def __dir__(cls): - return list(sorted(set( - ['__class__', '__module__', '__name__', 'three'] + - Meta1.__dir__(cls) + Meta2.__dir__(cls)))) - def __getattr__(self, name): - if name =='three': - return 3 - return super().__getattr__(name) - class Class1(metaclass=Meta1): - pass - class Class2(Class1, metaclass=Meta3): - pass - output = StringIO() - helper = pydoc.Helper(output=output) - helper(Class1) - expected_text1 = expected_virtualattribute_pattern2 % __name__ - result1 = output.getvalue().strip() - self.assertEqual(expected_text1, result1) - output = StringIO() - helper = pydoc.Helper(output=output) - helper(Class2) - expected_text2 = expected_virtualattribute_pattern3 % __name__ - result2 = output.getvalue().strip() - self.assertEqual(expected_text2, 
result2) - - @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), - 'trace function introduces __locals__ unexpectedly') - @requires_docstrings - def test_buggy_dir(self): - class M(type): - def __dir__(cls): - return ['__class__', '__name__', 'missing', 'here'] - class C(metaclass=M): - here = 'present!' - output = StringIO() - helper = pydoc.Helper(output=output) - helper(C) - expected_text = expected_missingattribute_pattern % __name__ - result = output.getvalue().strip() - self.assertEqual(expected_text, result) - - def test_resolve_false(self): - # Issue #23008: pydoc enum.{,Int}Enum failed - # because bool(enum.Enum) is False. - with captured_stdout() as help_io: - pydoc.help('enum.Enum') - helptext = help_io.getvalue() - self.assertIn('class Enum', helptext) - - -class TestInternalUtilities(unittest.TestCase): - - def setUp(self): - tmpdir = tempfile.TemporaryDirectory() - self.argv0dir = tmpdir.name - self.argv0 = os.path.join(tmpdir.name, "nonexistent") - self.addCleanup(tmpdir.cleanup) - self.abs_curdir = abs_curdir = os.getcwd() - self.curdir_spellings = ["", os.curdir, abs_curdir] - - def _get_revised_path(self, given_path, argv0=None): - # Checking that pydoc.cli() actually calls pydoc._get_revised_path() - # is handled via code review (at least for now). - if argv0 is None: - argv0 = self.argv0 - return pydoc._get_revised_path(given_path, argv0) - - def _get_starting_path(self): - # Get a copy of sys.path without the current directory. - clean_path = sys.path.copy() - for spelling in self.curdir_spellings: - for __ in range(clean_path.count(spelling)): - clean_path.remove(spelling) - return clean_path - - def test_sys_path_adjustment_adds_missing_curdir(self): - clean_path = self._get_starting_path() - expected_path = [self.abs_curdir] + clean_path - self.assertEqual(self._get_revised_path(clean_path), expected_path) - - def test_sys_path_adjustment_removes_argv0_dir(self): - clean_path = self._get_starting_path() - expected_path = [self.abs_curdir] + clean_path - leading_argv0dir = [self.argv0dir] + clean_path - self.assertEqual(self._get_revised_path(leading_argv0dir), expected_path) - trailing_argv0dir = clean_path + [self.argv0dir] - self.assertEqual(self._get_revised_path(trailing_argv0dir), expected_path) - - def test_sys_path_adjustment_protects_pydoc_dir(self): - def _get_revised_path(given_path): - return self._get_revised_path(given_path, argv0=pydoc.__file__) - clean_path = self._get_starting_path() - leading_argv0dir = [self.argv0dir] + clean_path - expected_path = [self.abs_curdir] + leading_argv0dir - self.assertEqual(_get_revised_path(leading_argv0dir), expected_path) - trailing_argv0dir = clean_path + [self.argv0dir] - expected_path = [self.abs_curdir] + trailing_argv0dir - self.assertEqual(_get_revised_path(trailing_argv0dir), expected_path) - - def test_sys_path_adjustment_when_curdir_already_included(self): - clean_path = self._get_starting_path() - for spelling in self.curdir_spellings: - with self.subTest(curdir_spelling=spelling): - # If curdir is already present, no alterations are made at all - leading_curdir = [spelling] + clean_path - self.assertIsNone(self._get_revised_path(leading_curdir)) - trailing_curdir = clean_path + [spelling] - self.assertIsNone(self._get_revised_path(trailing_curdir)) - leading_argv0dir = [self.argv0dir] + leading_curdir - self.assertIsNone(self._get_revised_path(leading_argv0dir)) - trailing_argv0dir = trailing_curdir + [self.argv0dir] - self.assertIsNone(self._get_revised_path(trailing_argv0dir)) - - -def 
setUpModule(): - thread_info = threading_helper.threading_setup() - unittest.addModuleCleanup(threading_helper.threading_cleanup, *thread_info) - unittest.addModuleCleanup(reap_children) - - -if __name__ == "__main__": - unittest.main() diff -Nru python3.11-3.11.8/Lib/test/test_pyexpat.py python3.11-3.11.9/Lib/test/test_pyexpat.py --- python3.11-3.11.8/Lib/test/test_pyexpat.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_pyexpat.py 2024-04-02 08:25:04.000000000 +0000 @@ -758,5 +758,59 @@ self.assertEqual(handler_call_args, [("bar", "baz")]) +class ReparseDeferralTest(unittest.TestCase): + def test_getter_setter_round_trip(self): + parser = expat.ParserCreate() + enabled = (expat.version_info >= (2, 6, 0)) + + self.assertIs(parser.GetReparseDeferralEnabled(), enabled) + parser.SetReparseDeferralEnabled(False) + self.assertIs(parser.GetReparseDeferralEnabled(), False) + parser.SetReparseDeferralEnabled(True) + self.assertIs(parser.GetReparseDeferralEnabled(), enabled) + + def test_reparse_deferral_enabled(self): + if expat.version_info < (2, 6, 0): + self.skipTest(f'Expat {expat.version_info} does not ' + 'support reparse deferral') + + started = [] + + def start_element(name, _): + started.append(name) + + parser = expat.ParserCreate() + parser.StartElementHandler = start_element + self.assertTrue(parser.GetReparseDeferralEnabled()) + + for chunk in (b''): + parser.Parse(chunk, False) + + # The key test: Have handlers already fired? Expecting: no. + self.assertEqual(started, []) + + parser.Parse(b'', True) + + self.assertEqual(started, ['doc']) + + def test_reparse_deferral_disabled(self): + started = [] + + def start_element(name, _): + started.append(name) + + parser = expat.ParserCreate() + parser.StartElementHandler = start_element + if expat.version_info >= (2, 6, 0): + parser.SetReparseDeferralEnabled(False) + self.assertFalse(parser.GetReparseDeferralEnabled()) + + for chunk in (b''): + parser.Parse(chunk, False) + + # The key test: Have handlers already fired? Expecting: yes. 
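[Reviewer note] The ReparseDeferralTest additions above target Expat 2.6.0's reparse deferral (the CVE-2023-52425 mitigation): with deferral enabled, handlers for a document fed in small chunks may not fire until more input, or the final Parse(..., True) call, arrives. A minimal sketch assuming a tiny <doc/> document split across two writes; the exact chunking used in the real test may differ:

from xml.parsers import expat

started = []
parser = expat.ParserCreate()
parser.StartElementHandler = lambda name, attrs: started.append(name)

# Only Expat >= 2.6.0 exposes the deferral toggle.
if expat.version_info >= (2, 6, 0):
    parser.SetReparseDeferralEnabled(False)   # fire handlers as early as possible

for chunk in (b'<do', b'c/>'):
    parser.Parse(chunk, False)
parser.Parse(b'', True)
print(started)   # ['doc']
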
+ self.assertEqual(started, ['doc']) + + if __name__ == "__main__": unittest.main() diff -Nru python3.11-3.11.8/Lib/test/test_regrtest.py python3.11-3.11.9/Lib/test/test_regrtest.py --- python3.11-3.11.8/Lib/test/test_regrtest.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_regrtest.py 2024-04-02 08:25:04.000000000 +0000 @@ -27,7 +27,7 @@ from test.libregrtest import main from test.libregrtest import setup from test.libregrtest import utils -from test.libregrtest.filter import set_match_tests, match_test +from test.libregrtest.filter import get_match_tests, set_match_tests, match_test from test.libregrtest.result import TestStats from test.libregrtest.utils import normalize_test_name @@ -389,7 +389,7 @@ self.checkError(['--unknown-option'], 'unrecognized arguments: --unknown-option') - def check_ci_mode(self, args, use_resources, rerun=True): + def create_regrtest(self, args): ns = cmdline._parse_args(args) # Check Regrtest attributes which are more reliable than Namespace @@ -401,6 +401,10 @@ regrtest = main.Regrtest(ns) + return regrtest + + def check_ci_mode(self, args, use_resources, rerun=True): + regrtest = self.create_regrtest(args) self.assertEqual(regrtest.num_workers, -1) self.assertEqual(regrtest.want_rerun, rerun) self.assertTrue(regrtest.randomize) @@ -446,6 +450,29 @@ ns = cmdline._parse_args(args) self.assertFalse(ns._add_python_opts) + def test_bisect(self): + args = ['--bisect'] + regrtest = self.create_regrtest(args) + self.assertTrue(regrtest.want_bisect) + + def test_verbose3_huntrleaks(self): + args = ['-R', '3:10', '--verbose3'] + with support.captured_stderr(): + regrtest = self.create_regrtest(args) + self.assertIsNotNone(regrtest.hunt_refleak) + self.assertEqual(regrtest.hunt_refleak.warmups, 3) + self.assertEqual(regrtest.hunt_refleak.runs, 10) + self.assertFalse(regrtest.output_on_failure) + + def test_xml_huntrleaks(self): + args = ['-R', '3:12', '--junit-xml', 'output.xml'] + with support.captured_stderr(): + regrtest = self.create_regrtest(args) + self.assertIsNotNone(regrtest.hunt_refleak) + self.assertEqual(regrtest.hunt_refleak.warmups, 3) + self.assertEqual(regrtest.hunt_refleak.runs, 12) + self.assertIsNone(regrtest.junit_filename) + @dataclasses.dataclass(slots=True) class Rerun: @@ -1148,8 +1175,8 @@ stderr=subprocess.STDOUT) self.check_executed_tests(output, [test], failed=test, stats=1) - line = 'beginning 6 repetitions\n123456\n......\n' - self.check_line(output, re.escape(line)) + line = r'beginning 6 repetitions. 
.*\n123:456\n[.0-9X]{3} 111\n' + self.check_line(output, line) line2 = '%s leaked [1, 1, 1] %s, sum=3\n' % (test, what) self.assertIn(line2, output) @@ -1179,6 +1206,47 @@ self.check_huntrleaks(run_workers=True) @unittest.skipUnless(support.Py_DEBUG, 'need a debug build') + def test_huntrleaks_bisect(self): + # test --huntrleaks --bisect + code = textwrap.dedent(""" + import unittest + + GLOBAL_LIST = [] + + class RefLeakTest(unittest.TestCase): + def test1(self): + pass + + def test2(self): + pass + + def test3(self): + GLOBAL_LIST.append(object()) + + def test4(self): + pass + """) + + test = self.create_test('huntrleaks', code=code) + + filename = 'reflog.txt' + self.addCleanup(os_helper.unlink, filename) + cmd = ['--huntrleaks', '3:3:', '--bisect', test] + output = self.run_tests(*cmd, + exitcode=EXITCODE_BAD_TEST, + stderr=subprocess.STDOUT) + + self.assertIn(f"Bisect {test}", output) + self.assertIn(f"Bisect {test}: exit code 0", output) + + # test3 is the one which leaks + self.assertIn("Bisection completed in", output) + self.assertIn( + "Tests (1):\n" + f"* {test}.RefLeakTest.test3\n", + output) + + @unittest.skipUnless(support.Py_DEBUG, 'need a debug build') def test_huntrleaks_fd_leak(self): # test --huntrleaks for file descriptor leak code = textwrap.dedent(""" @@ -2234,6 +2302,10 @@ def id(self): return self.test_id + # Restore patterns once the test completes + patterns = get_match_tests() + self.addCleanup(set_match_tests, patterns) + test_access = Test('test.test_os.FileTests.test_access') test_chdir = Test('test.test_os.Win32ErrorTests.test_chdir') test_copy = Test('test.test_shutil.TestCopy.test_copy') diff -Nru python3.11-3.11.8/Lib/test/test_sax.py python3.11-3.11.9/Lib/test/test_sax.py --- python3.11-3.11.8/Lib/test/test_sax.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_sax.py 2024-04-02 08:25:04.000000000 +0000 @@ -19,6 +19,7 @@ from io import BytesIO, StringIO import codecs import os.path +import pyexpat import shutil import sys from urllib.error import URLError @@ -1214,6 +1215,56 @@ self.assertEqual(result.getvalue(), start + b"text") + @unittest.skipIf(pyexpat.version_info < (2, 6, 0), + f'Expat {pyexpat.version_info} does not ' + 'support reparse deferral') + def test_flush_reparse_deferral_enabled(self): + result = BytesIO() + xmlgen = XMLGenerator(result) + parser = create_parser() + parser.setContentHandler(xmlgen) + + for chunk in (""): + parser.feed(chunk) + + self.assertEqual(result.getvalue(), start) # i.e. no elements started + self.assertTrue(parser._parser.GetReparseDeferralEnabled()) + + parser.flush() + + self.assertTrue(parser._parser.GetReparseDeferralEnabled()) + self.assertEqual(result.getvalue(), start + b"") + + parser.feed("") + parser.close() + + self.assertEqual(result.getvalue(), start + b"") + + def test_flush_reparse_deferral_disabled(self): + result = BytesIO() + xmlgen = XMLGenerator(result) + parser = create_parser() + parser.setContentHandler(xmlgen) + + for chunk in (""): + parser.feed(chunk) + + if pyexpat.version_info >= (2, 6, 0): + parser._parser.SetReparseDeferralEnabled(False) + self.assertEqual(result.getvalue(), start) # i.e. 
no elements started + + self.assertFalse(parser._parser.GetReparseDeferralEnabled()) + + parser.flush() + + self.assertFalse(parser._parser.GetReparseDeferralEnabled()) + self.assertEqual(result.getvalue(), start + b"") + + parser.feed("") + parser.close() + + self.assertEqual(result.getvalue(), start + b"") + # ===== Locator support def test_expat_locator_noinfo(self): diff -Nru python3.11-3.11.8/Lib/test/test_shutil.py python3.11-3.11.9/Lib/test/test_shutil.py --- python3.11-3.11.8/Lib/test/test_shutil.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_shutil.py 2024-04-02 08:25:04.000000000 +0000 @@ -500,6 +500,23 @@ finally: shutil.rmtree(TESTFN, ignore_errors=True) + @unittest.skipUnless(hasattr(os, "mkfifo"), 'requires os.mkfifo()') + @unittest.skipIf(sys.platform == "vxworks", + "fifo requires special path on VxWorks") + def test_rmtree_on_named_pipe(self): + os.mkfifo(TESTFN) + try: + with self.assertRaises(NotADirectoryError): + shutil.rmtree(TESTFN) + self.assertTrue(os.path.exists(TESTFN)) + finally: + os.unlink(TESTFN) + + os.mkdir(TESTFN) + os.mkfifo(os.path.join(TESTFN, 'mypipe')) + shutil.rmtree(TESTFN) + self.assertFalse(os.path.exists(TESTFN)) + class TestCopyTree(BaseTest, unittest.TestCase): diff -Nru python3.11-3.11.8/Lib/test/test_socket.py python3.11-3.11.9/Lib/test/test_socket.py --- python3.11-3.11.8/Lib/test/test_socket.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_socket.py 2024-04-02 08:25:04.000000000 +0000 @@ -45,6 +45,7 @@ VSOCKPORT = 1234 AIX = platform.system() == "AIX" +WSL = "microsoft-standard-WSL" in platform.release() try: import _socket @@ -467,6 +468,7 @@ ThreadableTest.clientTearDown(self) @unittest.skipIf(fcntl is None, "need fcntl") +@unittest.skipIf(WSL, 'VSOCK does not work on Microsoft WSL') @unittest.skipUnless(HAVE_SOCKET_VSOCK, 'VSOCK sockets required for this test.') @unittest.skipUnless(get_cid() != 2, @@ -483,6 +485,7 @@ self.serv.bind((socket.VMADDR_CID_ANY, VSOCKPORT)) self.serv.listen() self.serverExplicitReady() + self.serv.settimeout(support.LOOPBACK_TIMEOUT) self.conn, self.connaddr = self.serv.accept() self.addCleanup(self.conn.close) diff -Nru python3.11-3.11.8/Lib/test/test_ssl.py python3.11-3.11.9/Lib/test/test_ssl.py --- python3.11-3.11.8/Lib/test/test_ssl.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_ssl.py 2024-04-02 08:25:04.000000000 +0000 @@ -350,6 +350,15 @@ ssl.OP_NO_TLSv1_2 self.assertEqual(ssl.PROTOCOL_TLS, ssl.PROTOCOL_SSLv23) + def test_options(self): + # gh-106687: SSL options values are unsigned integer (uint64_t) + for name in dir(ssl): + if not name.startswith('OP_'): + continue + with self.subTest(option=name): + value = getattr(ssl, name) + self.assertGreaterEqual(value, 0, f"ssl.{name}") + def test_ssl_types(self): ssl_types = [ _ssl._SSLContext, @@ -1197,6 +1206,7 @@ ) def test_options(self): + # Test default SSLContext options ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) # OP_ALL | OP_NO_SSLv2 | OP_NO_SSLv3 is the default value default = (ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3) @@ -1205,16 +1215,30 @@ OP_SINGLE_DH_USE | OP_SINGLE_ECDH_USE | OP_ENABLE_MIDDLEBOX_COMPAT) self.assertEqual(default, ctx.options) + + # disallow TLSv1 with warnings_helper.check_warnings(): ctx.options |= ssl.OP_NO_TLSv1 self.assertEqual(default | ssl.OP_NO_TLSv1, ctx.options) + + # allow TLSv1 with warnings_helper.check_warnings(): ctx.options = (ctx.options & ~ssl.OP_NO_TLSv1) self.assertEqual(default, ctx.options) + + # clear all options 
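[Reviewer note] The new test_options added to the module-level tests above relates to gh-106687: SSL option values are treated as an unsigned 64-bit integer, so every ssl.OP_* constant is non-negative and out-of-range assignments to SSLContext.options raise OverflowError. A short sketch of both properties, assuming the 3.11.9 behaviour:

import ssl

# All option flags are exposed as non-negative integers.
for name in dir(ssl):
    if name.startswith('OP_'):
        assert getattr(ssl, name) >= 0, name

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
try:
    ctx.options = -1          # not representable as an unsigned value
except OverflowError:
    print('negative options rejected')
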
ctx.options = 0 # Ubuntu has OP_NO_SSLv3 forced on by default self.assertEqual(0, ctx.options & ~ssl.OP_NO_SSLv3) + # invalid options + with self.assertRaises(OverflowError): + ctx.options = -1 + with self.assertRaises(OverflowError): + ctx.options = 2 ** 100 + with self.assertRaises(TypeError): + ctx.options = "abc" + def test_verify_mode_protocol(self): with warnings_helper.check_warnings(): ctx = ssl.SSLContext(ssl.PROTOCOL_TLS) diff -Nru python3.11-3.11.8/Lib/test/test_subprocess.py python3.11-3.11.9/Lib/test/test_subprocess.py --- python3.11-3.11.8/Lib/test/test_subprocess.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_subprocess.py 2024-04-02 08:25:04.000000000 +0000 @@ -1620,6 +1620,22 @@ subprocess.run([sys.executable, "-c", "pass"]) self.assertFalse(mock_fork_exec.call_args_list[-1].args[-1]) + @unittest.skipUnless(hasattr(subprocess, '_winapi'), + 'need subprocess._winapi') + def test_wait_negative_timeout(self): + proc = subprocess.Popen(ZERO_RETURN_CMD) + with proc: + patch = mock.patch.object( + subprocess._winapi, + 'WaitForSingleObject', + return_value=subprocess._winapi.WAIT_OBJECT_0) + with patch as mock_wait: + proc.wait(-1) # negative timeout + mock_wait.assert_called_once_with(proc._handle, 0) + proc.returncode = None + + self.assertEqual(proc.wait(), 0) + class RunFuncTestCase(BaseTestCase): def run_python(self, code, **kwargs): diff -Nru python3.11-3.11.8/Lib/test/test_sysconfig.py python3.11-3.11.9/Lib/test/test_sysconfig.py --- python3.11-3.11.8/Lib/test/test_sysconfig.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_sysconfig.py 2024-04-02 08:25:04.000000000 +0000 @@ -150,17 +150,21 @@ 'python%d.%d' % sys.version_info[:2], 'site-packages') - # Resolve the paths in prefix - binpath = os.path.join(sys.prefix, binpath) - incpath = os.path.join(sys.prefix, incpath) - libpath = os.path.join(sys.prefix, libpath) + # Resolve the paths in an imaginary venv/ directory + binpath = os.path.join('venv', binpath) + incpath = os.path.join('venv', incpath) + libpath = os.path.join('venv', libpath) + + # Mimic the venv module, set all bases to the venv directory + bases = ('base', 'platbase', 'installed_base', 'installed_platbase') + vars = {base: 'venv' for base in bases} - self.assertEqual(binpath, sysconfig.get_path('scripts', scheme='posix_venv')) - self.assertEqual(libpath, sysconfig.get_path('purelib', scheme='posix_venv')) + self.assertEqual(binpath, sysconfig.get_path('scripts', scheme='posix_venv', vars=vars)) + self.assertEqual(libpath, sysconfig.get_path('purelib', scheme='posix_venv', vars=vars)) # The include directory on POSIX isn't exactly the same as before, # but it is "within" - sysconfig_includedir = sysconfig.get_path('include', scheme='posix_venv') + sysconfig_includedir = sysconfig.get_path('include', scheme='posix_venv', vars=vars) self.assertTrue(sysconfig_includedir.startswith(incpath + os.sep)) def test_nt_venv_scheme(self): @@ -170,14 +174,19 @@ incpath = 'Include' libpath = os.path.join('Lib', 'site-packages') - # Resolve the paths in prefix - binpath = os.path.join(sys.prefix, binpath) - incpath = os.path.join(sys.prefix, incpath) - libpath = os.path.join(sys.prefix, libpath) - - self.assertEqual(binpath, sysconfig.get_path('scripts', scheme='nt_venv')) - self.assertEqual(incpath, sysconfig.get_path('include', scheme='nt_venv')) - self.assertEqual(libpath, sysconfig.get_path('purelib', scheme='nt_venv')) + # Resolve the paths in an imaginary venv\ directory + venv = 'venv' + binpath = 
os.path.join(venv, binpath) + incpath = os.path.join(venv, incpath) + libpath = os.path.join(venv, libpath) + + # Mimic the venv module, set all bases to the venv directory + bases = ('base', 'platbase', 'installed_base', 'installed_platbase') + vars = {base: 'venv' for base in bases} + + self.assertEqual(binpath, sysconfig.get_path('scripts', scheme='nt_venv', vars=vars)) + self.assertEqual(incpath, sysconfig.get_path('include', scheme='nt_venv', vars=vars)) + self.assertEqual(libpath, sysconfig.get_path('purelib', scheme='nt_venv', vars=vars)) def test_venv_scheme(self): if sys.platform == 'win32': diff -Nru python3.11-3.11.8/Lib/test/test_tarfile.py python3.11-3.11.9/Lib/test/test_tarfile.py --- python3.11-3.11.8/Lib/test/test_tarfile.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_tarfile.py 2024-04-02 08:25:04.000000000 +0000 @@ -487,7 +487,30 @@ with tarfile.open(support.findfile('recursion.tar')) as tar: pass + def test_extractfile_attrs(self): + # gh-74468: TarFile.name must name a file, not a parent archive. + file = self.tar.getmember('ustar/regtype') + with self.tar.extractfile(file) as fobj: + self.assertRaises(AttributeError, fobj.fileno) + self.assertIs(fobj.readable(), True) + self.assertIs(fobj.writable(), False) + if self.is_stream: + self.assertRaises(AttributeError, fobj.seekable) + else: + self.assertIs(fobj.seekable(), True) + self.assertIs(fobj.closed, False) + self.assertIs(fobj.closed, True) + self.assertRaises(AttributeError, fobj.fileno) + self.assertIs(fobj.readable(), True) + self.assertIs(fobj.writable(), False) + if self.is_stream: + self.assertRaises(AttributeError, fobj.seekable) + else: + self.assertIs(fobj.seekable(), True) + + class MiscReadTestBase(CommonReadTest): + is_stream = False def requires_name_attribute(self): pass @@ -780,6 +803,7 @@ class StreamReadTest(CommonReadTest, unittest.TestCase): prefix="r|" + is_stream = True def test_read_through(self): # Issue #11224: A poorly designed _FileInFile.read() method diff -Nru python3.11-3.11.8/Lib/test/test_threadsignals.py python3.11-3.11.9/Lib/test/test_threadsignals.py --- python3.11-3.11.8/Lib/test/test_threadsignals.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_threadsignals.py 2024-04-02 08:25:04.000000000 +0000 @@ -32,39 +32,28 @@ # a function that will be spawned as a separate thread. def send_signals(): - os.kill(process_pid, signal.SIGUSR1) - os.kill(process_pid, signal.SIGUSR2) + # We use `raise_signal` rather than `kill` because: + # * It verifies that a signal delivered to a background thread still has + # its Python-level handler called on the main thread. + # * It ensures the signal is handled before the thread exits. + signal.raise_signal(signal.SIGUSR1) + signal.raise_signal(signal.SIGUSR2) signalled_all.release() @threading_helper.requires_working_threading() -@unittest.skipUnless(hasattr(signal, "alarm"), "test requires signal.alarm") class ThreadSignals(unittest.TestCase): def test_signals(self): with threading_helper.wait_threads_exit(): # Test signal handling semantics of threads. - # We spawn a thread, have the thread send two signals, and + # We spawn a thread, have the thread send itself two signals, and # wait for it to finish. Check that we got both signals # and that they were run by the main thread. signalled_all.acquire() self.spawnSignallingThread() signalled_all.acquire() - # the signals that we asked the kernel to send - # will come back, but we don't know when. 
- # (it might even be after the thread exits - # and might be out of order.) If we haven't seen - # the signals yet, send yet another signal and - # wait for it return. - if signal_blackboard[signal.SIGUSR1]['tripped'] == 0 \ - or signal_blackboard[signal.SIGUSR2]['tripped'] == 0: - try: - signal.alarm(1) - signal.pause() - finally: - signal.alarm(0) - self.assertEqual( signal_blackboard[signal.SIGUSR1]['tripped'], 1) self.assertEqual( signal_blackboard[signal.SIGUSR1]['tripped_by'], thread.get_ident()) diff -Nru python3.11-3.11.8/Lib/test/test_tools/test_makefile.py python3.11-3.11.9/Lib/test/test_tools/test_makefile.py --- python3.11-3.11.8/Lib/test/test_tools/test_makefile.py 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_tools/test_makefile.py 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,67 @@ +""" +Tests for `Makefile`. +""" + +import os +import unittest +from test import support +import sysconfig + +MAKEFILE = sysconfig.get_makefile_filename() + +if not support.check_impl_detail(cpython=True): + raise unittest.SkipTest('cpython only') +if not os.path.exists(MAKEFILE) or not os.path.isfile(MAKEFILE): + raise unittest.SkipTest('Makefile could not be found') + + +class TestMakefile(unittest.TestCase): + def list_test_dirs(self): + result = [] + found_testsubdirs = False + with open(MAKEFILE, 'r', encoding='utf-8') as f: + for line in f: + if line.startswith('TESTSUBDIRS='): + found_testsubdirs = True + result.append( + line.removeprefix('TESTSUBDIRS=').replace( + '\\', '', + ).strip(), + ) + continue + if found_testsubdirs: + if '\t' not in line: + break + result.append(line.replace('\\', '').strip()) + + # In Python 3.11 (and lower), many test modules are not in + # the tests/ directory. This check ignores them. + result = [d for d in result if d.startswith('test/') or d == 'test'] + + return result + + def test_makefile_test_folders(self): + test_dirs = self.list_test_dirs() + + used = [] + for dirpath, _, _ in os.walk(support.TEST_HOME_DIR): + dirname = os.path.basename(dirpath) + if dirname == '__pycache__': + continue + + relpath = os.path.relpath(dirpath, support.STDLIB_DIR) + with self.subTest(relpath=relpath): + self.assertIn( + relpath, + test_dirs, + msg=( + f"{relpath!r} is not included in the Makefile's list " + "of test directories to install" + ) + ) + used.append(relpath) + + # Check that there are no extra entries: + unique_test_dirs = set(test_dirs) + self.assertSetEqual(unique_test_dirs, set(used)) + self.assertEqual(len(test_dirs), len(unique_test_dirs)) diff -Nru python3.11-3.11.8/Lib/test/test_traceback.py python3.11-3.11.9/Lib/test/test_traceback.py --- python3.11-3.11.8/Lib/test/test_traceback.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_traceback.py 2024-04-02 08:25:04.000000000 +0000 @@ -636,6 +636,23 @@ result_lines = self.get_exception(f_with_binary_operator) self.assertEqual(result_lines, expected_error.splitlines()) + def test_caret_for_failed_assertion(self): + def f_assert(): + test = 3 + assert test == 1 and test == 2, "Bug found?" 
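[Reviewer note] The new test_caret_for_failed_assertion above checks that the fine-grained error location markers (PEP 657) underline the failing condition of an assert statement rather than the whole line. A quick way to see the same markers outside the test suite; the caret placement in the comment is indicative and mirrors the expected output in the test:

import traceback

def f_assert():
    test = 3
    assert test == 1 and test == 2, "Bug found?"

try:
    f_assert()
except AssertionError:
    print(traceback.format_exc())
# The last frame shows the source line followed by a caret run such as:
#     assert test == 1 and test == 2, "Bug found?"
#            ^^^^^^^^^^^^^^^^^^^^^^^
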
+ + lineno_f = f_assert.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + f' File "{__file__}", line {lineno_f+2}, in f_assert\n' + ' assert test == 1 and test == 2, "Bug found?"\n' + ' ^^^^^^^^^^^^^^^^^^^^^^^\n' + ) + result_lines = self.get_exception(f_assert) + self.assertEqual(result_lines, expected_error.splitlines()) + def test_traceback_specialization_with_syntax_error(self): bytecode = compile("1 / 0 / 1 / 2\n", TESTFN, "exec") diff -Nru python3.11-3.11.8/Lib/test/test_types.py python3.11-3.11.9/Lib/test/test_types.py --- python3.11-3.11.8/Lib/test/test_types.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_types.py 2024-04-02 08:25:04.000000000 +0000 @@ -709,6 +709,26 @@ self.assertEqual(hash(int | str), hash(str | int)) self.assertEqual(hash(int | str), hash(typing.Union[int, str])) + def test_union_of_unhashable(self): + class UnhashableMeta(type): + __hash__ = None + + class A(metaclass=UnhashableMeta): ... + class B(metaclass=UnhashableMeta): ... + + self.assertEqual((A | B).__args__, (A, B)) + union1 = A | B + with self.assertRaises(TypeError): + hash(union1) + + union2 = int | B + with self.assertRaises(TypeError): + hash(union2) + + union3 = A | int + with self.assertRaises(TypeError): + hash(union3) + def test_instancecheck_and_subclasscheck(self): for x in (int | str, typing.Union[int, str]): with self.subTest(x=x): diff -Nru python3.11-3.11.8/Lib/test/test_typing.py python3.11-3.11.9/Lib/test/test_typing.py --- python3.11-3.11.8/Lib/test/test_typing.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_typing.py 2024-04-02 08:25:04.000000000 +0000 @@ -1,10 +1,11 @@ import contextlib import collections from collections import defaultdict -from functools import lru_cache, wraps +from functools import lru_cache, wraps, reduce import inspect import itertools import gc +import operator import pickle import re import sys @@ -139,6 +140,26 @@ self.assertIsInstance(ms, Something) self.assertIsInstance(ms, Mock) + def test_subclassing_with_custom_constructor(self): + class Sub(Any): + def __init__(self, *args, **kwargs): pass + # The instantiation must not fail. + Sub(0, s="") + + def test_multiple_inheritance_with_custom_constructors(self): + class Foo: + def __init__(self, x): + self.x = x + + class Bar(Any, Foo): + def __init__(self, x, y): + self.y = y + super().__init__(x) + + b = Bar(1, 2) + self.assertEqual(b.x, 1) + self.assertEqual(b.y, 2) + def test_cannot_instantiate(self): with self.assertRaises(TypeError): Any() @@ -1705,6 +1726,26 @@ v = Union[u, Employee] self.assertEqual(v, Union[int, float, Employee]) + def test_union_of_unhashable(self): + class UnhashableMeta(type): + __hash__ = None + + class A(metaclass=UnhashableMeta): ... + class B(metaclass=UnhashableMeta): ... 
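[Reviewer note] test_union_of_unhashable (added in both its types and typing variants) pins down the behaviour when a union member's metaclass sets __hash__ = None: building the union and reading __args__ works, while hashing the union raises TypeError. A sketch of the behaviour these new tests assert:

class UnhashableMeta(type):
    __hash__ = None

class A(metaclass=UnhashableMeta): ...
class B(metaclass=UnhashableMeta): ...

union = A | B                     # constructing the union still works
print(union.__args__ == (A, B))   # True
try:
    hash(union)
except TypeError:
    print('union of unhashable members is itself unhashable')
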
+ + self.assertEqual(Union[A, B].__args__, (A, B)) + union1 = Union[A, B] + with self.assertRaises(TypeError): + hash(union1) + + union2 = Union[int, B] + with self.assertRaises(TypeError): + hash(union2) + + union3 = Union[A, int] + with self.assertRaises(TypeError): + hash(union3) + def test_repr(self): self.assertEqual(repr(Union), 'typing.Union') u = Union[Employee, int] @@ -3665,6 +3706,17 @@ c.bar = 'abc' self.assertEqual(c.__dict__, {'bar': 'abc'}) + def test_setattr_exceptions(self): + T = TypeVar("T") + class Immutable(Generic[T]): + def __setattr__(self, key, value): + raise RuntimeError("immutable") + + # gh-115165: This used to cause RuntimeError to be raised + # when we tried to set `__orig_class__` on the `Immutable` instance + # returned by the `Immutable[int]()` call + self.assertIsInstance(Immutable[int](), Immutable) + def test_subscripted_generics_as_proxies(self): T = TypeVar('T') class C(Generic[T]): @@ -4921,6 +4973,12 @@ with self.assertRaises(SyntaxError): get_type_hints(foo) + def test_syntax_error_empty_string(self): + for form in [typing.List, typing.Set, typing.Type, typing.Deque]: + with self.subTest(form=form): + with self.assertRaises(SyntaxError): + form[''] + def test_name_error(self): def foo(a: 'Noode[T]'): @@ -7308,6 +7366,17 @@ self.assertEqual(__name__, 'typing.re') self.assertEqual(len(w), 1) + def test_re_submodule_access_basics(self): + with warnings.catch_warnings(): + warnings.filterwarnings("error", category=DeprecationWarning) + from typing import re + self.assertIsInstance(re.__doc__, str) + self.assertEqual(re.__name__, "typing.re") + self.assertIsInstance(re.__dict__, types.MappingProxyType) + + with self.assertWarns(DeprecationWarning): + re.Match + def test_cannot_subclass(self): with self.assertRaises(TypeError) as ex: @@ -7352,6 +7421,76 @@ self.assertEqual(A.__metadata__, (4, 5)) self.assertEqual(A.__origin__, int) + def test_deduplicate_from_union(self): + # Regular: + self.assertEqual(get_args(Annotated[int, 1] | int), + (Annotated[int, 1], int)) + self.assertEqual(get_args(Union[Annotated[int, 1], int]), + (Annotated[int, 1], int)) + self.assertEqual(get_args(Annotated[int, 1] | Annotated[int, 2] | int), + (Annotated[int, 1], Annotated[int, 2], int)) + self.assertEqual(get_args(Union[Annotated[int, 1], Annotated[int, 2], int]), + (Annotated[int, 1], Annotated[int, 2], int)) + self.assertEqual(get_args(Annotated[int, 1] | Annotated[str, 1] | int), + (Annotated[int, 1], Annotated[str, 1], int)) + self.assertEqual(get_args(Union[Annotated[int, 1], Annotated[str, 1], int]), + (Annotated[int, 1], Annotated[str, 1], int)) + + # Duplicates: + self.assertEqual(Annotated[int, 1] | Annotated[int, 1] | int, + Annotated[int, 1] | int) + self.assertEqual(Union[Annotated[int, 1], Annotated[int, 1], int], + Union[Annotated[int, 1], int]) + + # Unhashable metadata: + self.assertEqual(get_args(str | Annotated[int, {}] | Annotated[int, set()] | int), + (str, Annotated[int, {}], Annotated[int, set()], int)) + self.assertEqual(get_args(Union[str, Annotated[int, {}], Annotated[int, set()], int]), + (str, Annotated[int, {}], Annotated[int, set()], int)) + self.assertEqual(get_args(str | Annotated[int, {}] | Annotated[str, {}] | int), + (str, Annotated[int, {}], Annotated[str, {}], int)) + self.assertEqual(get_args(Union[str, Annotated[int, {}], Annotated[str, {}], int]), + (str, Annotated[int, {}], Annotated[str, {}], int)) + + self.assertEqual(get_args(Annotated[int, 1] | str | Annotated[str, {}] | int), + (Annotated[int, 1], str, Annotated[str, {}], 
int)) + self.assertEqual(get_args(Union[Annotated[int, 1], str, Annotated[str, {}], int]), + (Annotated[int, 1], str, Annotated[str, {}], int)) + + import dataclasses + @dataclasses.dataclass + class ValueRange: + lo: int + hi: int + v = ValueRange(1, 2) + self.assertEqual(get_args(Annotated[int, v] | None), + (Annotated[int, v], types.NoneType)) + self.assertEqual(get_args(Union[Annotated[int, v], None]), + (Annotated[int, v], types.NoneType)) + self.assertEqual(get_args(Optional[Annotated[int, v]]), + (Annotated[int, v], types.NoneType)) + + # Unhashable metadata duplicated: + self.assertEqual(Annotated[int, {}] | Annotated[int, {}] | int, + Annotated[int, {}] | int) + self.assertEqual(Annotated[int, {}] | Annotated[int, {}] | int, + int | Annotated[int, {}]) + self.assertEqual(Union[Annotated[int, {}], Annotated[int, {}], int], + Union[Annotated[int, {}], int]) + self.assertEqual(Union[Annotated[int, {}], Annotated[int, {}], int], + Union[int, Annotated[int, {}]]) + + def test_order_in_union(self): + expr1 = Annotated[int, 1] | str | Annotated[str, {}] | int + for args in itertools.permutations(get_args(expr1)): + with self.subTest(args=args): + self.assertEqual(expr1, reduce(operator.or_, args)) + + expr2 = Union[Annotated[int, 1], str, Annotated[str, {}], int] + for args in itertools.permutations(get_args(expr2)): + with self.subTest(args=args): + self.assertEqual(expr2, Union[args]) + def test_specialize(self): L = Annotated[List[T], "my decoration"] LI = Annotated[List[int], "my decoration"] @@ -7372,6 +7511,16 @@ {Annotated[int, 4, 5], Annotated[int, 4, 5], Annotated[T, 4, 5]}, {Annotated[int, 4, 5], Annotated[T, 4, 5]} ) + # Unhashable `metadata` raises `TypeError`: + a1 = Annotated[int, []] + with self.assertRaises(TypeError): + hash(a1) + + class A: + __hash__ = None + a2 = Annotated[int, A()] + with self.assertRaises(TypeError): + hash(a2) def test_instantiate(self): class C: @@ -7397,6 +7546,17 @@ self.assertEqual(MyCount([4, 4, 5]), {4: 2, 5: 1}) self.assertEqual(MyCount[int]([4, 4, 5]), {4: 2, 5: 1}) + def test_instantiate_immutable(self): + class C: + def __setattr__(self, key, value): + raise Exception("should be ignored") + + A = Annotated[C, "a decoration"] + # gh-115165: This used to cause RuntimeError to be raised + # when we tried to set `__orig_class__` on the `C` instance + # returned by the `A()` call + self.assertIsInstance(A(), C) + def test_cannot_instantiate_forward(self): A = Annotated["int", (5, 6)] with self.assertRaises(TypeError): diff -Nru python3.11-3.11.8/Lib/test/test_unicode.py python3.11-3.11.9/Lib/test/test_unicode.py --- python3.11-3.11.8/Lib/test/test_unicode.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_unicode.py 2024-04-02 08:25:04.000000000 +0000 @@ -2443,12 +2443,7 @@ def __repr__(self): return '\\n' - class s2: - def __repr__(self): - return '\\n' - self.assertEqual(repr(s1()), '\\n') - self.assertEqual(repr(s2()), '\\n') def test_printable_repr(self): self.assertEqual(repr('\U00010000'), "'%c'" % (0x10000,)) # printable diff -Nru python3.11-3.11.8/Lib/test/test_urllib2.py python3.11-3.11.9/Lib/test/test_urllib2.py --- python3.11-3.11.8/Lib/test/test_urllib2.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_urllib2.py 2024-04-02 08:25:04.000000000 +0000 @@ -14,10 +14,11 @@ import subprocess import urllib.request -# The proxy bypass method imported below has logic specific to the OSX -# proxy config data structure but is testable on all platforms. 
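[Reviewer note] The test_deduplicate_from_union additions above exercise the companion fix to typing._deduplicate that appears later in this patch: duplicate union members are removed even when their Annotated metadata is unhashable, such as a dict or a set. A sketch mirroring the new test cases, assuming the 3.11.9 fix is applied:

from typing import Annotated, Union, get_args

# Unhashable metadata ({} and set()) no longer breaks union construction.
u = Union[str, Annotated[int, {}], Annotated[int, set()], int]
print(get_args(u))   # (str, Annotated[int, {}], Annotated[int, set()], int)

# Exact duplicates collapse to a single member even with dict metadata.
print(Union[Annotated[int, {}], Annotated[int, {}], int]
      == Union[Annotated[int, {}], int])   # True
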
+# The proxy bypass method imported below has logic specific to the +# corresponding system but is testable on all platforms. from urllib.request import (Request, OpenerDirector, HTTPBasicAuthHandler, HTTPPasswordMgrWithPriorAuth, _parse_proxy, + _proxy_bypass_winreg_override, _proxy_bypass_macosx_sysconf, AbstractDigestAuthHandler) from urllib.parse import urlparse @@ -763,7 +764,7 @@ ["foo", "bar"], "", None), ("ftp://localhost/baz.gif;type=a", "localhost", ftplib.FTP_PORT, "", "", "A", - [], "baz.gif", None), # XXX really this should guess image/gif + [], "baz.gif", "image/gif"), ]: req = Request(url) req.timeout = None @@ -1445,6 +1446,30 @@ self.assertEqual(req.host, "proxy.example.com:3128") self.assertEqual(req.get_header("Proxy-authorization"), "FooBar") + @unittest.skipUnless(os.name == "nt", "only relevant for Windows") + def test_winreg_proxy_bypass(self): + proxy_override = "www.example.com;*.example.net; 192.168.0.1" + proxy_bypass = _proxy_bypass_winreg_override + for host in ("www.example.com", "www.example.net", "192.168.0.1"): + self.assertTrue(proxy_bypass(host, proxy_override), + "expected bypass of %s to be true" % host) + + for host in ("example.com", "www.example.org", "example.net", + "192.168.0.2"): + self.assertFalse(proxy_bypass(host, proxy_override), + "expected bypass of %s to be False" % host) + + # check intranet address bypass + proxy_override = "example.com; " + self.assertTrue(proxy_bypass("example.com", proxy_override), + "expected bypass of %s to be true" % host) + self.assertFalse(proxy_bypass("example.net", proxy_override), + "expected bypass of %s to be False" % host) + for host in ("test", "localhost"): + self.assertTrue(proxy_bypass(host, proxy_override), + "expect to bypass intranet address '%s'" + % host) + @unittest.skipUnless(sys.platform == 'darwin', "only relevant for OSX") def test_osx_proxy_bypass(self): bypass = { diff -Nru python3.11-3.11.8/Lib/test/test_urlparse.py python3.11-3.11.9/Lib/test/test_urlparse.py --- python3.11-3.11.8/Lib/test/test_urlparse.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_urlparse.py 2024-04-02 08:25:04.000000000 +0000 @@ -19,6 +19,10 @@ ("=a", [('', 'a')]), ("a", [('a', '')]), ("a=", [('a', '')]), + ("a=b=c", [('a', 'b=c')]), + ("a%3Db=c", [('a=b', 'c')]), + ("a=b&c=d", [('a', 'b'), ('c', 'd')]), + ("a=b%26c=d", [('a', 'b&c=d')]), ("&a=b", [('a', 'b')]), ("a=a+b&b=b+c", [('a', 'a b'), ('b', 'b c')]), ("a=1&a=2", [('a', '1'), ('a', '2')]), @@ -29,6 +33,10 @@ (b"=a", [(b'', b'a')]), (b"a", [(b'a', b'')]), (b"a=", [(b'a', b'')]), + (b"a=b=c", [(b'a', b'b=c')]), + (b"a%3Db=c", [(b'a=b', b'c')]), + (b"a=b&c=d", [(b'a', b'b'), (b'c', b'd')]), + (b"a=b%26c=d", [(b'a', b'b&c=d')]), (b"&a=b", [(b'a', b'b')]), (b"a=a+b&b=b+c", [(b'a', b'a b'), (b'b', b'b c')]), (b"a=1&a=2", [(b'a', b'1'), (b'a', b'2')]), @@ -36,6 +44,14 @@ ("a=a+b;b=b+c", [('a', 'a b;b=b c')]), (b";a=b", [(b';a', b'b')]), (b"a=a+b;b=b+c", [(b'a', b'a b;b=b c')]), + + ("\u0141=\xE9", [('\u0141', '\xE9')]), + ("%C5%81=%C3%A9", [('\u0141', '\xE9')]), + ("%81=%A9", [('\ufffd', '\ufffd')]), + (b"\xc5\x81=\xc3\xa9", [(b'\xc5\x81', b'\xc3\xa9')]), + (b"%C5%81=%C3%A9", [(b'\xc5\x81', b'\xc3\xa9')]), + (b"\x81=\xA9", [(b'\x81', b'\xa9')]), + (b"%81=%A9", [(b'\x81', b'\xa9')]), ] # Each parse_qs testcase is a two-tuple that contains @@ -49,6 +65,10 @@ ("=a", {'': ['a']}), ("a", {'a': ['']}), ("a=", {'a': ['']}), + ("a=b=c", {'a': ['b=c']}), + ("a%3Db=c", {'a=b': ['c']}), + ("a=b&c=d", {'a': ['b'], 'c': ['d']}), + ("a=b%26c=d", {'a': 
['b&c=d']}), ("&a=b", {'a': ['b']}), ("a=a+b&b=b+c", {'a': ['a b'], 'b': ['b c']}), ("a=1&a=2", {'a': ['1', '2']}), @@ -59,6 +79,10 @@ (b"=a", {b'': [b'a']}), (b"a", {b'a': [b'']}), (b"a=", {b'a': [b'']}), + (b"a=b=c", {b'a': [b'b=c']}), + (b"a%3Db=c", {b'a=b': [b'c']}), + (b"a=b&c=d", {b'a': [b'b'], b'c': [b'd']}), + (b"a=b%26c=d", {b'a': [b'b&c=d']}), (b"&a=b", {b'a': [b'b']}), (b"a=a+b&b=b+c", {b'a': [b'a b'], b'b': [b'b c']}), (b"a=1&a=2", {b'a': [b'1', b'2']}), @@ -66,6 +90,15 @@ ("a=a+b;b=b+c", {'a': ['a b;b=b c']}), (b";a=b", {b';a': [b'b']}), (b"a=a+b;b=b+c", {b'a':[ b'a b;b=b c']}), + (b"a=a%E2%80%99b", {b'a': [b'a\xe2\x80\x99b']}), + + ("\u0141=\xE9", {'\u0141': ['\xE9']}), + ("%C5%81=%C3%A9", {'\u0141': ['\xE9']}), + ("%81=%A9", {'\ufffd': ['\ufffd']}), + (b"\xc5\x81=\xc3\xa9", {b'\xc5\x81': [b'\xc3\xa9']}), + (b"%C5%81=%C3%A9", {b'\xc5\x81': [b'\xc3\xa9']}), + (b"\x81=\xA9", {b'\x81': [b'\xa9']}), + (b"%81=%A9", {b'\x81': [b'\xa9']}), ] class UrlParseTestCase(unittest.TestCase): @@ -990,8 +1023,8 @@ def test_parse_qsl_max_num_fields(self): with self.assertRaises(ValueError): - urllib.parse.parse_qs('&'.join(['a=a']*11), max_num_fields=10) - urllib.parse.parse_qs('&'.join(['a=a']*10), max_num_fields=10) + urllib.parse.parse_qsl('&'.join(['a=a']*11), max_num_fields=10) + urllib.parse.parse_qsl('&'.join(['a=a']*10), max_num_fields=10) def test_parse_qs_separator(self): parse_qs_semicolon_cases = [ @@ -1034,6 +1067,30 @@ result_bytes = urllib.parse.parse_qsl(orig, separator=b';') self.assertEqual(result_bytes, expect, "Error parsing %r" % orig) + def test_parse_qsl_bytes(self): + self.assertEqual(urllib.parse.parse_qsl(b'a=b'), [(b'a', b'b')]) + self.assertEqual(urllib.parse.parse_qsl(bytearray(b'a=b')), [(b'a', b'b')]) + self.assertEqual(urllib.parse.parse_qsl(memoryview(b'a=b')), [(b'a', b'b')]) + + def test_parse_qsl_false_value(self): + kwargs = dict(keep_blank_values=True, strict_parsing=True) + for x in '', b'', None, 0, 0.0, [], {}, memoryview(b''): + self.assertEqual(urllib.parse.parse_qsl(x, **kwargs), []) + self.assertRaises(ValueError, urllib.parse.parse_qsl, x, separator=1) + + def test_parse_qsl_errors(self): + self.assertRaises(TypeError, urllib.parse.parse_qsl, list(b'a=b')) + self.assertRaises(TypeError, urllib.parse.parse_qsl, iter(b'a=b')) + self.assertRaises(TypeError, urllib.parse.parse_qsl, 1) + self.assertRaises(TypeError, urllib.parse.parse_qsl, object()) + + for separator in '', b'', None, 0, 1, 0.0, 1.5: + with self.assertRaises(ValueError): + urllib.parse.parse_qsl('a=b', separator=separator) + with self.assertRaises(UnicodeEncodeError): + urllib.parse.parse_qsl(b'a=b', separator='\xa6') + with self.assertRaises(UnicodeDecodeError): + urllib.parse.parse_qsl('a=b', separator=b'\xa6') def test_urlencode_sequences(self): # Other tests incidentally urlencode things; test non-covered cases: diff -Nru python3.11-3.11.8/Lib/test/test_venv.py python3.11-3.11.9/Lib/test/test_venv.py --- python3.11-3.11.8/Lib/test/test_venv.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_venv.py 2024-04-02 08:25:04.000000000 +0000 @@ -495,7 +495,7 @@ envpy = os.path.join(os.path.realpath(self.env_dir), self.bindir, self.exe) script = os.path.join(TEST_HOME_DIR, '_test_venv_multiprocessing.py') - subprocess.check_call([envpy, script]) + subprocess.check_call([envpy, "-I", script]) @unittest.skipIf(os.name == 'nt', 'not relevant on Windows') def test_deactivate_with_strict_bash_opts(self): diff -Nru python3.11-3.11.8/Lib/test/test_xml_etree.py 
python3.11-3.11.9/Lib/test/test_xml_etree.py --- python3.11-3.11.8/Lib/test/test_xml_etree.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_xml_etree.py 2024-04-02 08:25:04.000000000 +0000 @@ -13,6 +13,7 @@ import operator import os import pickle +import pyexpat import sys import textwrap import types @@ -1377,12 +1378,14 @@ class XMLPullParserTest(unittest.TestCase): - def _feed(self, parser, data, chunk_size=None): + def _feed(self, parser, data, chunk_size=None, flush=False): if chunk_size is None: parser.feed(data) else: for i in range(0, len(data), chunk_size): parser.feed(data[i:i+chunk_size]) + if flush: + parser.flush() def assert_events(self, parser, expected, max_events=None): self.assertEqual( @@ -1400,28 +1403,35 @@ self.assertEqual([(action, elem.tag) for action, elem in events], expected) - def test_simple_xml(self): - for chunk_size in (None, 1, 5): - with self.subTest(chunk_size=chunk_size): - parser = ET.XMLPullParser() - self.assert_event_tags(parser, []) - self._feed(parser, "\n", chunk_size) - self.assert_event_tags(parser, []) - self._feed(parser, - "\n text\n", chunk_size) - self.assert_event_tags(parser, [('end', 'element')]) - self._feed(parser, "texttail\n", chunk_size) - self._feed(parser, "\n", chunk_size) - self.assert_event_tags(parser, [ - ('end', 'element'), - ('end', 'empty-element'), - ]) - self._feed(parser, "\n", chunk_size) - self.assert_event_tags(parser, [('end', 'root')]) - self.assertIsNone(parser.close()) + def test_simple_xml(self, chunk_size=None, flush=False): + parser = ET.XMLPullParser() + self.assert_event_tags(parser, []) + self._feed(parser, "\n", chunk_size, flush) + self.assert_event_tags(parser, []) + self._feed(parser, + "\n text\n", chunk_size, flush) + self.assert_event_tags(parser, [('end', 'element')]) + self._feed(parser, "texttail\n", chunk_size, flush) + self._feed(parser, "\n", chunk_size, flush) + self.assert_event_tags(parser, [ + ('end', 'element'), + ('end', 'empty-element'), + ]) + self._feed(parser, "\n", chunk_size, flush) + self.assert_event_tags(parser, [('end', 'root')]) + self.assertIsNone(parser.close()) + + def test_simple_xml_chunk_1(self): + self.test_simple_xml(chunk_size=1, flush=True) + + def test_simple_xml_chunk_5(self): + self.test_simple_xml(chunk_size=5, flush=True) + + def test_simple_xml_chunk_22(self): + self.test_simple_xml(chunk_size=22) def test_feed_while_iterating(self): parser = ET.XMLPullParser() @@ -1617,6 +1627,56 @@ with self.assertRaises(ValueError): ET.XMLPullParser(events=('start', 'end', 'bogus')) + @unittest.skipIf(pyexpat.version_info < (2, 6, 0), + f'Expat {pyexpat.version_info} does not ' + 'support reparse deferral') + def test_flush_reparse_deferral_enabled(self): + parser = ET.XMLPullParser(events=('start', 'end')) + + for chunk in (""): + parser.feed(chunk) + + self.assert_event_tags(parser, []) # i.e. 
no elements started + if ET is pyET: + self.assertTrue(parser._parser._parser.GetReparseDeferralEnabled()) + + parser.flush() + + self.assert_event_tags(parser, [('start', 'doc')]) + if ET is pyET: + self.assertTrue(parser._parser._parser.GetReparseDeferralEnabled()) + + parser.feed("") + parser.close() + + self.assert_event_tags(parser, [('end', 'doc')]) + + def test_flush_reparse_deferral_disabled(self): + parser = ET.XMLPullParser(events=('start', 'end')) + + for chunk in (""): + parser.feed(chunk) + + if pyexpat.version_info >= (2, 6, 0): + if not ET is pyET: + self.skipTest(f'XMLParser.(Get|Set)ReparseDeferralEnabled ' + 'methods not available in C') + parser._parser._parser.SetReparseDeferralEnabled(False) + self.assert_event_tags(parser, []) # i.e. no elements started + + if ET is pyET: + self.assertFalse(parser._parser._parser.GetReparseDeferralEnabled()) + + parser.flush() + + self.assert_event_tags(parser, [('start', 'doc')]) + if ET is pyET: + self.assertFalse(parser._parser._parser.GetReparseDeferralEnabled()) + + parser.feed("") + parser.close() + + self.assert_event_tags(parser, [('end', 'doc')]) # # xinclude tests (samples from appendix C of the xinclude specification) diff -Nru python3.11-3.11.8/Lib/test/test_zipfile.py python3.11-3.11.9/Lib/test/test_zipfile.py --- python3.11-3.11.8/Lib/test/test_zipfile.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/test/test_zipfile.py 2024-04-02 08:25:04.000000000 +0000 @@ -28,7 +28,7 @@ captured_stdout, captured_stderr, requires_subprocess ) from test.support.os_helper import ( - TESTFN, unlink, rmtree, temp_dir, temp_cwd, fd_count + TESTFN, unlink, rmtree, temp_dir, temp_cwd, fd_count, FakePath ) @@ -162,7 +162,7 @@ self.zip_open_test(f, self.compression) def test_open_with_pathlike(self): - path = pathlib.Path(TESTFN2) + path = FakePath(TESTFN2) self.zip_open_test(path, self.compression) with zipfile.ZipFile(path, "r", self.compression) as zipfp: self.assertIsInstance(zipfp.filename, str) @@ -449,6 +449,27 @@ self.assertEqual(zipfp.read('file1'), b'data1') self.assertEqual(zipfp.read('file2'), b'data2') + def test_zipextfile_attrs(self): + fname = "somefile.txt" + with zipfile.ZipFile(TESTFN2, mode="w") as zipfp: + zipfp.writestr(fname, "bogus") + + with zipfile.ZipFile(TESTFN2, mode="r") as zipfp: + with zipfp.open(fname) as fid: + self.assertEqual(fid.name, fname) + self.assertRaises(io.UnsupportedOperation, fid.fileno) + self.assertEqual(fid.mode, 'r') + self.assertIs(fid.readable(), True) + self.assertIs(fid.writable(), False) + self.assertIs(fid.seekable(), True) + self.assertIs(fid.closed, False) + self.assertIs(fid.closed, True) + self.assertEqual(fid.name, fname) + self.assertEqual(fid.mode, 'r') + self.assertRaises(io.UnsupportedOperation, fid.fileno) + self.assertRaises(ValueError, fid.readable) + self.assertIs(fid.writable(), False) + self.assertRaises(ValueError, fid.seekable) def tearDown(self): unlink(TESTFN) @@ -580,17 +601,16 @@ def test_io_on_closed_zipextfile(self): fname = "somefile.txt" - with zipfile.ZipFile(TESTFN2, mode="w") as zipfp: + with zipfile.ZipFile(TESTFN2, mode="w", compression=self.compression) as zipfp: zipfp.writestr(fname, "bogus") with zipfile.ZipFile(TESTFN2, mode="r") as zipfp: with zipfp.open(fname) as fid: fid.close() + self.assertIs(fid.closed, True) self.assertRaises(ValueError, fid.read) self.assertRaises(ValueError, fid.seek, 0) self.assertRaises(ValueError, fid.tell) - self.assertRaises(ValueError, fid.readable) - self.assertRaises(ValueError, fid.seekable) def 
test_write_to_readonly(self): """Check that trying to call write() on a readonly ZipFile object @@ -1287,6 +1307,21 @@ self.assertEqual(data.write(q), LENGTH) self.assertEqual(zip.getinfo('data').file_size, LENGTH) + def test_zipwritefile_attrs(self): + fname = "somefile.txt" + with zipfile.ZipFile(TESTFN2, mode="w", compression=self.compression) as zipfp: + with zipfp.open(fname, 'w') as fid: + self.assertRaises(io.UnsupportedOperation, fid.fileno) + self.assertIs(fid.readable(), False) + self.assertIs(fid.writable(), True) + self.assertIs(fid.seekable(), False) + self.assertIs(fid.closed, False) + self.assertIs(fid.closed, True) + self.assertRaises(io.UnsupportedOperation, fid.fileno) + self.assertIs(fid.readable(), False) + self.assertIs(fid.writable(), True) + self.assertIs(fid.seekable(), False) + class StoredWriterTests(AbstractWriterTests, unittest.TestCase): compression = zipfile.ZIP_STORED @@ -1489,7 +1524,7 @@ fp.write("print(42)\n") with TemporaryFile() as t, zipfile.PyZipFile(t, "w") as zipfp: - zipfp.writepy(pathlib.Path(TESTFN2) / "mod1.py") + zipfp.writepy(FakePath(os.path.join(TESTFN2, "mod1.py"))) names = zipfp.namelist() self.assertCompiledIn('mod1.py', names) finally: @@ -1547,7 +1582,7 @@ def test_extract_with_target_pathlike(self): with temp_dir() as extdir: - self._test_extract_with_target(pathlib.Path(extdir)) + self._test_extract_with_target(FakePath(extdir)) def test_extract_all(self): with temp_cwd(): @@ -1582,7 +1617,7 @@ def test_extract_all_with_target_pathlike(self): with temp_dir() as extdir: - self._test_extract_all_with_target(pathlib.Path(extdir)) + self._test_extract_all_with_target(FakePath(extdir)) def check_file(self, filename, content): self.assertTrue(os.path.isfile(filename)) @@ -1855,7 +1890,7 @@ fp.write("this is not a legal zip file\n") self.assertFalse(zipfile.is_zipfile(TESTFN)) # - passing a path-like object - self.assertFalse(zipfile.is_zipfile(pathlib.Path(TESTFN))) + self.assertFalse(zipfile.is_zipfile(FakePath(TESTFN))) # - passing a file object with open(TESTFN, "rb") as fp: self.assertFalse(zipfile.is_zipfile(fp)) @@ -2847,6 +2882,22 @@ os.mkdir(os.path.join(TESTFN2, "a")) self.test_extract_dir() + def test_extract_dir_backslash(self): + zfname = findfile("zipdir_backslash.zip") + with zipfile.ZipFile(zfname) as zipf: + zipf.extractall(TESTFN2) + if os.name == 'nt': + self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "a"))) + self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "a", "b"))) + self.assertTrue(os.path.isfile(os.path.join(TESTFN2, "a", "b", "c"))) + self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "d"))) + self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "d", "e"))) + else: + self.assertTrue(os.path.isfile(os.path.join(TESTFN2, "a\\b\\c"))) + self.assertTrue(os.path.isfile(os.path.join(TESTFN2, "d\\e\\"))) + self.assertFalse(os.path.exists(os.path.join(TESTFN2, "a"))) + self.assertFalse(os.path.exists(os.path.join(TESTFN2, "d"))) + def test_write_dir(self): dirpath = os.path.join(TESTFN2, "x") os.mkdir(dirpath) @@ -2957,7 +3008,7 @@ self.assertEqual(zi.file_size, os.path.getsize(__file__)) def test_from_file_pathlike(self): - zi = zipfile.ZipInfo.from_file(pathlib.Path(__file__)) + zi = zipfile.ZipInfo.from_file(FakePath(__file__)) self.assertEqual(posixpath.basename(zi.filename), 'test_zipfile.py') self.assertFalse(zi.is_dir()) self.assertEqual(zi.file_size, os.path.getsize(__file__)) Binary files /tmp/tmp7l5tttlk/8_w0TKnzWH/python3.11-3.11.8/Lib/test/zipdir_backslash.zip and 
/tmp/tmp7l5tttlk/WvsAu7rQMb/python3.11-3.11.9/Lib/test/zipdir_backslash.zip differ diff -Nru python3.11-3.11.8/Lib/tkinter/__init__.py python3.11-3.11.9/Lib/tkinter/__init__.py --- python3.11-3.11.8/Lib/tkinter/__init__.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/tkinter/__init__.py 2024-04-02 08:25:04.000000000 +0000 @@ -3073,11 +3073,16 @@ Widget.__init__(self, master, 'checkbutton', cnf, kw) def _setup(self, master, cnf): + # Because Checkbutton defaults to a variable with the same name as + # the widget, Checkbutton default names must be globally unique, + # not just unique within the parent widget. if not cnf.get('name'): global _checkbutton_count name = self.__class__.__name__.lower() _checkbutton_count += 1 - cnf['name'] = f'!{name}{_checkbutton_count}' + # To avoid collisions with ttk.Checkbutton, use the different + # name template. + cnf['name'] = f'!{name}-{_checkbutton_count}' super()._setup(master, cnf) def deselect(self): diff -Nru python3.11-3.11.8/Lib/tkinter/test/test_ttk/test_widgets.py python3.11-3.11.9/Lib/tkinter/test/test_ttk/test_widgets.py --- python3.11-3.11.8/Lib/tkinter/test/test_ttk/test_widgets.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/tkinter/test/test_ttk/test_widgets.py 2024-04-02 08:25:04.000000000 +0000 @@ -287,9 +287,29 @@ b.pack() buttons.append(b) variables = [str(b['variable']) for b in buttons] - print(variables) self.assertEqual(len(set(variables)), 4, variables) + def test_unique_variables2(self): + buttons = [] + f = ttk.Frame(self.root) + f.pack() + f = ttk.Frame(self.root) + f.pack() + for j in 'AB': + b = tkinter.Checkbutton(f, text=j) + b.pack() + buttons.append(b) + # Should be larger than the number of all previously created + # tkinter.Checkbutton widgets: + for j in range(100): + b = ttk.Checkbutton(f, text=str(j)) + b.pack() + buttons.append(b) + names = [str(b) for b in buttons] + self.assertEqual(len(set(names)), len(buttons), names) + variables = [str(b['variable']) for b in buttons] + self.assertEqual(len(set(variables)), len(buttons), variables) + @add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) class EntryTest(AbstractWidgetTest, unittest.TestCase): diff -Nru python3.11-3.11.8/Lib/typing.py python3.11-3.11.9/Lib/typing.py --- python3.11-3.11.8/Lib/typing.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/typing.py 2024-04-02 08:25:04.000000000 +0000 @@ -303,19 +303,33 @@ newargs.append(arg) return newargs -def _deduplicate(params): +def _deduplicate(params, *, unhashable_fallback=False): # Weed out strict duplicates, preserving the first of each occurrence. 
- all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - return params - + try: + return dict.fromkeys(params) + except TypeError: + if not unhashable_fallback: + raise + # Happens for cases like `Annotated[dict, {'x': IntValidator()}]` + return _deduplicate_unhashable(params) + +def _deduplicate_unhashable(unhashable_params): + new_unhashable = [] + for t in unhashable_params: + if t not in new_unhashable: + new_unhashable.append(t) + return new_unhashable + +def _compare_args_orderless(first_args, second_args): + first_unhashable = _deduplicate_unhashable(first_args) + second_unhashable = _deduplicate_unhashable(second_args) + t = list(second_unhashable) + try: + for elem in first_unhashable: + t.remove(elem) + except ValueError: + return False + return not t def _remove_dups_flatten(parameters): """Internal helper for Union creation and substitution. @@ -330,7 +344,7 @@ else: params.append(p) - return tuple(_deduplicate(params)) + return tuple(_deduplicate(params, unhashable_fallback=True)) def _flatten_literal_params(parameters): @@ -522,7 +536,7 @@ def __new__(cls, *args, **kwargs): if cls is Any: raise TypeError("Any cannot be instantiated") - return super().__new__(cls, *args, **kwargs) + return super().__new__(cls) @_SpecialForm @@ -856,7 +870,7 @@ # If we do `def f(*args: *Ts)`, then we'll have `arg = '*Ts'`. # Unfortunately, this isn't a valid expression on its own, so we # do the unpacking manually. - if arg[0] == '*': + if arg.startswith('*'): arg_to_compile = f'({arg},)[0]' # E.g. (*Ts,)[0] or (*tuple[int, int],)[0] else: arg_to_compile = arg @@ -1275,7 +1289,9 @@ result = self.__origin__(*args, **kwargs) try: result.__orig_class__ = self - except AttributeError: + # Some objects raise TypeError (or something even more exotic) + # if you try to set attributes on them; we guard against that here + except Exception: pass return result @@ -1671,7 +1687,10 @@ def __eq__(self, other): if not isinstance(other, (_UnionGenericAlias, types.UnionType)): return NotImplemented - return set(self.__args__) == set(other.__args__) + try: # fast path + return set(self.__args__) == set(other.__args__) + except TypeError: # not hashable, slow path + return _compare_args_orderless(self.__args__, other.__args__) def __hash__(self): return hash(frozenset(self.__args__)) @@ -3380,7 +3399,7 @@ class _DeprecatedType(type): def __getattribute__(cls, name): - if name not in ("__dict__", "__module__") and name in cls.__dict__: + if name not in {"__dict__", "__module__", "__doc__"} and name in cls.__dict__: warnings.warn( f"{cls.__name__} is deprecated, import directly " f"from typing instead. 
{cls.__name__} will be removed " diff -Nru python3.11-3.11.8/Lib/unittest/mock.py python3.11-3.11.9/Lib/unittest/mock.py --- python3.11-3.11.8/Lib/unittest/mock.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/unittest/mock.py 2024-04-02 08:25:04.000000000 +0000 @@ -538,7 +538,7 @@ if self._mock_delegate is not None: ret = self._mock_delegate.return_value - if ret is DEFAULT: + if ret is DEFAULT and self._mock_wraps is None: ret = self._get_child_mock( _new_parent=self, _new_name='()' ) @@ -1194,6 +1194,9 @@ if self._mock_return_value is not DEFAULT: return self.return_value + if self._mock_delegate and self._mock_delegate.return_value is not DEFAULT: + return self.return_value + if self._mock_wraps is not None: return self._mock_wraps(*args, **kwargs) @@ -2732,9 +2735,12 @@ if _parent is not None and not instance: _parent._mock_children[_name] = mock + wrapped = kwargs.get('wraps') + if is_type and not instance and 'return_value' not in kwargs: mock.return_value = create_autospec(spec, spec_set, instance=True, - _name='()', _parent=mock) + _name='()', _parent=mock, + wraps=wrapped) for entry in dir(spec): if _is_magic(entry): @@ -2756,6 +2762,9 @@ continue kwargs = {'spec': original} + # Wrap child attributes also. + if wrapped and hasattr(wrapped, entry): + kwargs.update(wraps=original) if spec_set: kwargs = {'spec_set': original} diff -Nru python3.11-3.11.8/Lib/unittest/test/testmock/testmock.py python3.11-3.11.9/Lib/unittest/test/testmock/testmock.py --- python3.11-3.11.8/Lib/unittest/test/testmock/testmock.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/unittest/test/testmock/testmock.py 2024-04-02 08:25:04.000000000 +0000 @@ -234,6 +234,64 @@ with mock.patch('builtins.open', mock.mock_open()): mock.mock_open() # should still be valid with open() mocked + def test_create_autospec_wraps_class(self): + """Autospec a class with wraps & test if the call is passed to the + wrapped object.""" + result = "real result" + + class Result: + def get_result(self): + return result + class_mock = create_autospec(spec=Result, wraps=Result) + # Have to reassign the return_value to DEFAULT to return the real + # result (actual instance of "Result") when the mock is called. + class_mock.return_value = mock.DEFAULT + self.assertEqual(class_mock().get_result(), result) + # Autospec should also wrap child attributes of parent. + self.assertEqual(class_mock.get_result._mock_wraps, Result.get_result) + + def test_create_autospec_instance_wraps_class(self): + """Autospec a class instance with wraps & test if the call is passed + to the wrapped object.""" + result = "real result" + + class Result: + @staticmethod + def get_result(): + """This is a static method because when the mocked instance of + 'Result' will call this method, it won't be able to consume + 'self' argument.""" + return result + instance_mock = create_autospec(spec=Result, instance=True, wraps=Result) + # Have to reassign the return_value to DEFAULT to return the real + # result from "Result.get_result" when the mocked instance of "Result" + # calls "get_result". + instance_mock.get_result.return_value = mock.DEFAULT + self.assertEqual(instance_mock.get_result(), result) + # Autospec should also wrap child attributes of the instance. 
+ self.assertEqual(instance_mock.get_result._mock_wraps, Result.get_result) + + def test_create_autospec_wraps_function_type(self): + """Autospec a function or a method with wraps & test if the call is + passed to the wrapped object.""" + result = "real result" + + class Result: + def get_result(self): + return result + func_mock = create_autospec(spec=Result.get_result, wraps=Result.get_result) + self.assertEqual(func_mock(Result()), result) + + def test_explicit_return_value_even_if_mock_wraps_object(self): + """If the mock has an explicit return_value set then calls are not + passed to the wrapped object and the return_value is returned instead. + """ + def my_func(): + return None + func_mock = create_autospec(spec=my_func, wraps=my_func) + return_value = "explicit return value" + func_mock.return_value = return_value + self.assertEqual(func_mock(), return_value) def test_reset_mock(self): parent = Mock() @@ -603,6 +661,14 @@ real = Mock() mock = Mock(wraps=real) + # If "Mock" wraps an object, just accessing its + # "return_value" ("NonCallableMock.__get_return_value") should not + # trigger its descriptor ("NonCallableMock.__set_return_value") so + # the default "return_value" should always be "sentinel.DEFAULT". + self.assertEqual(mock.return_value, DEFAULT) + # It will not be "sentinel.DEFAULT" if the mock is not wrapping any + # object. + self.assertNotEqual(real.return_value, DEFAULT) self.assertEqual(mock(), real()) real.reset_mock() diff -Nru python3.11-3.11.8/Lib/unittest/test/testmock/testpatch.py python3.11-3.11.9/Lib/unittest/test/testmock/testpatch.py --- python3.11-3.11.8/Lib/unittest/test/testmock/testpatch.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/unittest/test/testmock/testpatch.py 2024-04-02 08:25:04.000000000 +0000 @@ -1912,7 +1912,7 @@ with patch.object(foo, '__module__', "testpatch2"): self.assertEqual(foo.__module__, "testpatch2") - self.assertEqual(foo.__module__, 'unittest.test.testmock.testpatch') + self.assertEqual(foo.__module__, __name__) with patch.object(foo, '__annotations__', dict([('s', 1, )])): self.assertEqual(foo.__annotations__, dict([('s', 1, )])) diff -Nru python3.11-3.11.8/Lib/urllib/parse.py python3.11-3.11.9/Lib/urllib/parse.py --- python3.11-3.11.8/Lib/urllib/parse.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/urllib/parse.py 2024-04-02 08:25:04.000000000 +0000 @@ -755,42 +755,48 @@ Returns a list, as G-d intended. """ - qs, _coerce_result = _coerce_args(qs) - separator, _ = _coerce_args(separator) - if not separator or (not isinstance(separator, (str, bytes))): + if not separator or not isinstance(separator, (str, bytes)): raise ValueError("Separator must be of type string or bytes.") + if isinstance(qs, str): + if not isinstance(separator, str): + separator = str(separator, 'ascii') + eq = '=' + def _unquote(s): + return unquote_plus(s, encoding=encoding, errors=errors) + else: + if not qs: + return [] + # Use memoryview() to reject integers and iterables, + # acceptable by the bytes constructor. + qs = bytes(memoryview(qs)) + if isinstance(separator, str): + separator = bytes(separator, 'ascii') + eq = b'=' + def _unquote(s): + return unquote_to_bytes(s.replace(b'+', b' ')) + + if not qs: + return [] # If max_num_fields is defined then check that the number of fields # is less than max_num_fields. This prevents a memory exhaustion DOS # attack via post bodies with many fields. 
if max_num_fields is not None: - num_fields = 1 + qs.count(separator) if qs else 0 + num_fields = 1 + qs.count(separator) if max_num_fields < num_fields: raise ValueError('Max number of fields exceeded') r = [] - query_args = qs.split(separator) if qs else [] - for name_value in query_args: - if not name_value and not strict_parsing: - continue - nv = name_value.split('=', 1) - if len(nv) != 2: - if strict_parsing: + for name_value in qs.split(separator): + if name_value or strict_parsing: + name, has_eq, value = name_value.partition(eq) + if not has_eq and strict_parsing: raise ValueError("bad query field: %r" % (name_value,)) - # Handle case of a control-name with no equal sign - if keep_blank_values: - nv.append('') - else: - continue - if len(nv[1]) or keep_blank_values: - name = nv[0].replace('+', ' ') - name = unquote(name, encoding=encoding, errors=errors) - name = _coerce_result(name) - value = nv[1].replace('+', ' ') - value = unquote(value, encoding=encoding, errors=errors) - value = _coerce_result(value) - r.append((name, value)) + if value or keep_blank_values: + name = _unquote(name) + value = _unquote(value) + r.append((name, value)) return r def unquote_plus(string, encoding='utf-8', errors='replace'): diff -Nru python3.11-3.11.8/Lib/urllib/request.py python3.11-3.11.9/Lib/urllib/request.py --- python3.11-3.11.8/Lib/urllib/request.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/urllib/request.py 2024-04-02 08:25:04.000000000 +0000 @@ -2577,6 +2577,7 @@ } """ from fnmatch import fnmatch + from ipaddress import AddressValueError, IPv4Address hostonly, port = _splitport(host) @@ -2593,20 +2594,17 @@ return True hostIP = None + try: + hostIP = int(IPv4Address(hostonly)) + except AddressValueError: + pass for value in proxy_settings.get('exceptions', ()): # Items in the list are strings like these: *.local, 169.254/16 if not value: continue m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value) - if m is not None: - if hostIP is None: - try: - hostIP = socket.gethostbyname(hostonly) - hostIP = ip2num(hostIP) - except OSError: - continue - + if m is not None and hostIP is not None: base = ip2num(m.group(1)) mask = m.group(2) if mask is None: @@ -2629,6 +2627,31 @@ return False +# Same as _proxy_bypass_macosx_sysconf, testable on all platforms +def _proxy_bypass_winreg_override(host, override): + """Return True if the host should bypass the proxy server. + + The proxy override list is obtained from the Windows + Internet settings proxy override registry value. + + An example of a proxy override value is: + "www.example.com;*.example.net; 192.168.0.1" + """ + from fnmatch import fnmatch + + host, _ = _splitport(host) + proxy_override = override.split(';') + for test in proxy_override: + test = test.strip() + # "" should bypass the proxy server for all intranet addresses + if test == '': + if '.' not in host: + return True + elif fnmatch(host, test): + return True + return False + + if sys.platform == 'darwin': from _scproxy import _get_proxy_settings, _get_proxies @@ -2727,7 +2750,7 @@ import winreg except ImportError: # Std modules, so should be around - but you never know! 
- return 0 + return False try: internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') @@ -2737,40 +2760,10 @@ 'ProxyOverride')[0]) # ^^^^ Returned as Unicode but problems if not converted to ASCII except OSError: - return 0 + return False if not proxyEnable or not proxyOverride: - return 0 - # try to make a host list from name and IP address. - rawHost, port = _splitport(host) - host = [rawHost] - try: - addr = socket.gethostbyname(rawHost) - if addr != rawHost: - host.append(addr) - except OSError: - pass - try: - fqdn = socket.getfqdn(rawHost) - if fqdn != rawHost: - host.append(fqdn) - except OSError: - pass - # make a check value list from the registry entry: replace the - # '' string by the localhost entry and the corresponding - # canonical entry. - proxyOverride = proxyOverride.split(';') - # now check if we match one of the registry values. - for test in proxyOverride: - if test == '': - if '.' not in rawHost: - return 1 - test = test.replace(".", r"\.") # mask dots - test = test.replace("*", r".*") # change glob sequence - test = test.replace("?", r".") # change glob char - for val in host: - if re.match(test, val, re.I): - return 1 - return 0 + return False + return _proxy_bypass_winreg_override(host, proxyOverride) def proxy_bypass(host): """Return True, if host should be bypassed. diff -Nru python3.11-3.11.8/Lib/xml/etree/ElementTree.py python3.11-3.11.9/Lib/xml/etree/ElementTree.py --- python3.11-3.11.8/Lib/xml/etree/ElementTree.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/xml/etree/ElementTree.py 2024-04-02 08:25:04.000000000 +0000 @@ -1328,6 +1328,11 @@ else: yield event + def flush(self): + if self._parser is None: + raise ValueError("flush() called after end of stream") + self._parser.flush() + def XML(text, parser=None): """Parse XML document from string constant. @@ -1734,6 +1739,15 @@ del self.parser, self._parser del self.target, self._target + def flush(self): + was_enabled = self.parser.GetReparseDeferralEnabled() + try: + self.parser.SetReparseDeferralEnabled(False) + self.parser.Parse(b"", False) + except self._error as v: + self._raiseerror(v) + finally: + self.parser.SetReparseDeferralEnabled(was_enabled) # -------------------------------------------------------------------- # C14N 2.0 diff -Nru python3.11-3.11.8/Lib/xml/sax/expatreader.py python3.11-3.11.9/Lib/xml/sax/expatreader.py --- python3.11-3.11.8/Lib/xml/sax/expatreader.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/xml/sax/expatreader.py 2024-04-02 08:25:04.000000000 +0000 @@ -220,6 +220,20 @@ # FIXME: when to invoke error()? 
self._err_handler.fatalError(exc) + def flush(self): + if self._parser is None: + return + + was_enabled = self._parser.GetReparseDeferralEnabled() + try: + self._parser.SetReparseDeferralEnabled(False) + self._parser.Parse(b"", False) + except expat.error as e: + exc = SAXParseException(expat.ErrorString(e.code), e, self) + self._err_handler.fatalError(exc) + finally: + self._parser.SetReparseDeferralEnabled(was_enabled) + def _close_source(self): source = self._source try: diff -Nru python3.11-3.11.8/Lib/zipfile.py python3.11-3.11.9/Lib/zipfile.py --- python3.11-3.11.8/Lib/zipfile.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Lib/zipfile.py 2024-04-02 08:25:04.000000000 +0000 @@ -559,7 +559,15 @@ def is_dir(self): """Return True if this archive member is a directory.""" - return self.filename[-1] == '/' + if self.filename.endswith('/'): + return True + # The ZIP format specification requires to use forward slashes + # as the directory separator, but in practice some ZIP files + # created on Windows can use backward slashes. For compatibility + # with the extraction code which already handles this: + if os.path.altsep: + return self.filename.endswith((os.path.sep, os.path.altsep)) + return False # ZIP encryption uses the CRC32 one-byte primitive for scrambling some diff -Nru python3.11-3.11.8/Makefile.pre.in python3.11-3.11.9/Makefile.pre.in --- python3.11-3.11.8/Makefile.pre.in 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Makefile.pre.in 2024-04-02 08:25:04.000000000 +0000 @@ -2030,6 +2030,7 @@ test/test_json \ test/test_module \ test/test_peg_generator \ + test/test_pydoc \ test/test_sqlite3 \ test/test_tomllib \ test/test_tomllib/data \ @@ -2064,7 +2065,11 @@ tkinter/test/test_tkinter \ tkinter/test/test_ttk \ unittest/test \ - unittest/test/testmock + unittest/test/testmock \ + test/test_concurrent_futures \ + test/test_multiprocessing_fork \ + test/test_multiprocessing_forkserver \ + test/test_multiprocessing_spawn TEST_MODULES=@TEST_MODULES@ libinstall: all $(srcdir)/Modules/xxmodule.c diff -Nru python3.11-3.11.8/Misc/ACKS python3.11-3.11.9/Misc/ACKS --- python3.11-3.11.8/Misc/ACKS 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Misc/ACKS 2024-04-02 08:25:04.000000000 +0000 @@ -740,6 +740,7 @@ Lisa Hewus Fresh Kevan Heydon Wouter van Heyst +Derek Higgins Kelsey Hightower Jason Hildebrand Ryan Hileman diff -Nru python3.11-3.11.8/Misc/NEWS python3.11-3.11.9/Misc/NEWS --- python3.11-3.11.8/Misc/NEWS 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Misc/NEWS 2024-04-02 08:25:04.000000000 +0000 @@ -2,6 +2,363 @@ Python News +++++++++++ +What's New in Python 3.11.9 final? +================================== + +*Release date: 2024-04-02* + +Security +-------- + +- gh-issue-115398: Allow controlling Expat >=2.6.0 reparse deferral + (CVE-2023-52425) by adding five new methods: + + * :meth:`xml.etree.ElementTree.XMLParser.flush` + * :meth:`xml.etree.ElementTree.XMLPullParser.flush` + * :meth:`xml.parsers.expat.xmlparser.GetReparseDeferralEnabled` + * :meth:`xml.parsers.expat.xmlparser.SetReparseDeferralEnabled` + * :meth:`xml.sax.expatreader.ExpatParser.flush` + +- gh-issue-115399: Update bundled libexpat to 2.6.0 + +- gh-issue-115243: Fix possible crashes in :meth:`collections.deque.index` + when the deque is concurrently modified. 
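To illustrate the gh-115398 / gh-115399 entries above, a minimal sketch (not part of the patch) of driving the new ``XMLPullParser.flush()`` under Expat >= 2.6.0, where reparse deferral is enabled by default::

    import xml.etree.ElementTree as ET

    parser = ET.XMLPullParser(events=("start", "end"))
    # With Expat >= 2.6.0, small chunks may be buffered (reparse deferral),
    # so no events are guaranteed to be available yet after feed().
    parser.feed("<doc")
    parser.feed(">")
    # flush() asks the underlying Expat parser to process its buffered
    # input now instead of waiting for more data.
    parser.flush()
    for event, elem in parser.read_events():
        print(event, elem.tag)      # expected to include ('start', 'doc')
    parser.feed("</doc>")
    parser.close()
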
+ +- gh-issue-114572: :meth:`ssl.SSLContext.cert_store_stats` and + :meth:`ssl.SSLContext.get_ca_certs` now correctly lock access to the + certificate store, when the :class:`ssl.SSLContext` is shared across + multiple threads. + +Core and Builtins +----------------- + +- gh-issue-116296: Fix possible refleak in :meth:`!object.__reduce__` + internal error handling. + +- gh-issue-116034: Fix location of the error on a failed assertion. + +- gh-issue-115823: Properly calculate error ranges in the parser when + raising :exc:`SyntaxError` exceptions caused by invalid byte sequences. + Patch by Pablo Galindo + +- gh-issue-112087: For an empty reverse iterator for list will be reduced to + :func:`reversed`. Patch by Donghee Na. + +- gh-issue-115011: Setters for members with an unsigned integer type now + support the same range of valid values for objects that has a + :meth:`~object.__index__` method as for :class:`int`. + +- gh-issue-96497: Fix incorrect resolution of mangled class variables used + in assignment expressions in comprehensions. + +Library +------- + +- gh-issue-117310: Fixed an unlikely early & extra ``Py_DECREF`` triggered + crash in :mod:`ssl` when creating a new ``_ssl._SSLContext`` if CPython + was built implausibly such that the default cipher list is empty **or** + the SSL library it was linked against reports a failure from its C + ``SSL_CTX_set_cipher_list()`` API. + +- gh-issue-117178: Fix regression in lazy loading of self-referential + modules, introduced in gh-114781. + +- gh-issue-117084: Fix :mod:`zipfile` extraction for directory entries with + the name containing backslashes on Windows. + +- gh-issue-117110: Fix a bug that prevents subclasses of :class:`typing.Any` + to be instantiated with arguments. Patch by Chris Fu. + +- gh-issue-90872: On Windows, :meth:`subprocess.Popen.wait` no longer calls + ``WaitForSingleObject()`` with a negative timeout: pass ``0`` ms if the + timeout is negative. Patch by Victor Stinner. + +- gh-issue-116957: configparser: Don't leave ConfigParser values in an + invalid state (stored as a list instead of a str) after an earlier read + raised DuplicateSectionError or DuplicateOptionError. + +- gh-issue-90095: Ignore empty lines and comments in ``.pdbrc`` + +- gh-issue-116764: Restore support of ``None`` and other false values in + :mod:`urllib.parse` functions :func:`~urllib.parse.parse_qs` and + :func:`~urllib.parse.parse_qsl`. Also, they now raise a TypeError for + non-zero integers and non-empty sequences. + +- gh-issue-116811: In ``PathFinder.invalidate_caches``, delegate to + ``MetadataPathFinder.invalidate_caches``. + +- gh-issue-116600: Fix :func:`repr` for global :class:`~enum.Flag` members. + +- gh-issue-116484: Change automatically generated + :class:`tkinter.Checkbutton` widget names to avoid collisions with + automatically generated :class:`tkinter.ttk.Checkbutton` widget names + within the same parent widget. + +- gh-issue-116401: Fix blocking :func:`os.fwalk` and :func:`shutil.rmtree` + on opening named pipe. + +- gh-issue-116143: Fix a race in pydoc ``_start_server``, eliminating a + window in which ``_start_server`` can return a thread that is "serving" + but without a ``docserver`` set. + +- gh-issue-116325: :mod:`typing`: raise :exc:`SyntaxError` instead of + :exc:`AttributeError` on forward references as empty strings. + +- gh-issue-90535: Fix support of *interval* values > 1 in + :class:`logging.TimedRotatingFileHandler` for ``when='MIDNIGHT'`` and + ``when='Wx'``. 
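As a small, hedged illustration of the gh-116764 entry above (behaviour sketched from the changed ``urllib.parse`` code, not taken from the patch)::

    from urllib.parse import parse_qsl

    print(parse_qsl("a=1&b=2"))     # [('a', '1'), ('b', '2')]
    print(parse_qsl(""))            # [] -- empty strings are accepted again
    print(parse_qsl(None))          # [] -- None and other false values too
    # Non-zero integers and non-empty sequences now raise TypeError,
    # e.g. parse_qsl(42) -> TypeError.
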
+ +- gh-issue-115978: Disable preadv(), readv(), pwritev(), and writev() on + WASI. + + Under wasmtime for WASI 0.2, these functions don't pass test_posix + (https://github.com/bytecodealliance/wasmtime/issues/7830). + +- gh-issue-88352: Fix the computation of the next rollover time in the + :class:`logging.TimedRotatingFileHandler` handler. + :meth:`!computeRollover` now always returns a timestamp larger than the + specified time and works correctly during the DST change. + :meth:`!doRollover` no longer overwrite the already rolled over file, + saving from data loss when run at midnight or during repeated time at the + DST change. + +- gh-issue-87115: Set ``__main__.__spec__`` to ``None`` when running a + script with :mod:`pdb` + +- gh-issue-76511: Fix UnicodeEncodeError in :meth:`email.Message.as_string` + that results when a message that claims to be in the ascii character set + actually has non-ascii characters. Non-ascii characters are now replaced + with the U+FFFD replacement character, like in the ``replace`` error + handler. + +- gh-issue-75988: Fixed :func:`unittest.mock.create_autospec` to pass the + call through to the wrapped object to return the real result. + +- gh-issue-115881: Fix issue where :func:`ast.parse` would incorrectly flag + conditional context managers (such as ``with (x() if y else z()): ...``) + as invalid syntax if ``feature_version=(3, 8)`` was passed. This reverts + changes to the grammar made as part of gh-94949. + +- gh-issue-115886: Fix silent truncation of the name with an embedded null + character in :class:`multiprocessing.shared_memory.SharedMemory`. + +- gh-issue-115809: Improve algorithm for computing which rolled-over log + files to delete in :class:`logging.TimedRotatingFileHandler`. It is now + reliable for handlers without ``namer`` and with arbitrary deterministic + ``namer`` that leaves the datetime part in the file name unmodified. + +- gh-issue-74668: :mod:`urllib.parse` functions + :func:`~urllib.parse.parse_qs` and :func:`~urllib.parse.parse_qsl` now + support bytes arguments containing raw and percent-encoded non-ASCII data. + +- gh-issue-67044: :func:`csv.writer` now always quotes or escapes ``'\r'`` + and ``'\n'``, regardless of *lineterminator* value. + +- gh-issue-115712: :func:`csv.writer()` now quotes empty fields if delimiter + is a space and skipinitialspace is true and raises exception if quoting is + not possible. + +- gh-issue-115618: Fix improper decreasing the reference count for ``None`` + argument in :class:`property` methods :meth:`~property.getter`, + :meth:`~property.setter` and :meth:`~property.deleter`. + +- gh-issue-115570: A :exc:`DeprecationWarning` is no longer omitted on + access to the ``__doc__`` attributes of the deprecated ``typing.io`` and + ``typing.re`` pseudo-modules. + +- gh-issue-112006: Fix :func:`inspect.unwrap` for types with the + ``__wrapper__`` data descriptor. + +- gh-issue-101293: Support callables with the ``__call__()`` method and + types with ``__new__()`` and ``__init__()`` methods set to class methods, + static methods, bound methods, partial functions, and other types of + methods and descriptors in :meth:`inspect.Signature.from_callable`. + +- gh-issue-115392: Fix a bug in :mod:`doctest` where incorrect line numbers + would be reported for decorated functions. 
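Relating to the gh-75988 entry above, a minimal sketch (the ``multiply`` helper is made up for illustration) of autospeccing with ``wraps``::

    from unittest import mock

    def multiply(a, b):
        return a * b

    # The autospecced mock checks the call signature and, with ``wraps``,
    # now passes the call through to the real function.
    func_mock = mock.create_autospec(spec=multiply, wraps=multiply)
    print(func_mock(2, 3))               # 6, computed by the wrapped function
    func_mock.assert_called_once_with(2, 3)

    # An explicitly set return_value still takes precedence over the
    # wrapped object.
    func_mock.return_value = 42
    print(func_mock(2, 3))               # 42
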
+ +- gh-issue-114563: Fix several :func:`format()` bugs when using the C + implementation of :class:`~decimal.Decimal`: * memory leak in some rare + cases when using the ``z`` format option (coerce negative 0) * incorrect + output when applying the ``z`` format option to type ``F`` (fixed-point + with capital ``NAN`` / ``INF``) * incorrect output when applying the ``#`` + format option (alternate form) + +- gh-issue-115197: ``urllib.request`` no longer resolves the hostname before + checking it against the system's proxy bypass list on macOS and Windows. + +- gh-issue-115198: Fix support of Docutils >= 0.19 in :mod:`distutils`. + +- gh-issue-115165: Most exceptions are now ignored when attempting to set + the ``__orig_class__`` attribute on objects returned when calling + :mod:`typing` generic aliases (including generic aliases created using + :data:`typing.Annotated`). Previously only :exc:`AttributeError` was + ignored. Patch by Dave Shawley. + +- gh-issue-115133: Fix tests for + :class:`~xml.etree.ElementTree.XMLPullParser` with Expat 2.6.0. + +- gh-issue-115059: :meth:`io.BufferedRandom.read1` now flushes the + underlying write buffer. + +- gh-issue-79382: Trailing ``**`` no longer allows to match files and + non-existing paths in recursive :func:`~glob.glob`. + +- gh-issue-114763: Protect modules loaded with + :class:`importlib.util.LazyLoader` from race conditions when multiple + threads try to access attributes before the loading is complete. + +- gh-issue-97959: Fix rendering class methods, bound methods, method and + function aliases in :mod:`pydoc`. Class methods no longer have "method of + builtins.type instance" note. Corresponding notes are now added for class + and unbound methods. Method and function aliases now have references to + the module or the class where the origin was defined if it differs from + the current. Bound methods are now listed in the static methods section. + Methods of builtin classes are now supported as well as methods of Python + classes. + +- gh-issue-112281: Allow creating :ref:`union of types` for + :class:`typing.Annotated` with unhashable metadata. + +- gh-issue-111775: Fix + :meth:`importlib.resources.simple.ResourceHandle.open` for text mode, + added missed ``stream`` argument. + +- gh-issue-90095: Make .pdbrc and -c work with any valid pdb commands. + +- gh-issue-107155: Fix incorrect output of ``help(x)`` where ``x`` is a + :keyword:`lambda` function, which has an ``__annotations__`` dictionary + attribute with a ``"return"`` key. + +- gh-issue-105866: Fixed ``_get_slots`` bug which caused error when defining + dataclasses with slots and a weakref_slot. + +- gh-issue-60346: Fix ArgumentParser inconsistent with parse_known_args. + +- gh-issue-100985: Update HTTPSConnection to consistently wrap IPv6 + Addresses when using a proxy. + +- gh-issue-100884: email: fix misfolding of comma in address-lists over + multiple lines in combination with unicode encoding. + +- gh-issue-95782: Fix :func:`io.BufferedReader.tell`, + :func:`io.BufferedReader.seek`, :func:`_pyio.BufferedReader.tell`, + :func:`io.BufferedRandom.tell`, :func:`io.BufferedRandom.seek` and + :func:`_pyio.BufferedRandom.tell` being able to return negative offsets. + +- gh-issue-96310: Fix a traceback in :mod:`argparse` when all options in a + mutually exclusive group are suppressed. 
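A short sketch of the gh-112281 change above; the ``IntValidator`` metadata class is hypothetical and only needs to be unhashable::

    from typing import Annotated, Union

    class IntValidator:
        """Hypothetical, deliberately unhashable metadata object."""
        __hash__ = None

    # Deduplicating the union arguments used to hash the Annotated metadata
    # and fail with TypeError; an order-preserving fallback now handles
    # unhashable metadata.
    T = Union[Annotated[dict, IntValidator()], int]
    print(T)
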
+ +- gh-issue-93205: Fixed a bug in + :class:`logging.handlers.TimedRotatingFileHandler` where multiple rotating + handler instances pointing to files with the same name but different + extensions would conflict and not delete the correct files. + +- bpo-44865: Add missing call to localization function in :mod:`argparse`. + +- bpo-43952: Fix :meth:`multiprocessing.connection.Listener.accept()` to + accept empty bytes as authkey. Not accepting empty bytes as key causes it + to hang indefinitely. + +- bpo-42125: linecache: get module name from ``__spec__`` if available. This + allows getting source code for the ``__main__`` module when a custom + loader is used. + +- gh-issue-66543: Make :func:`mimetypes.guess_type` properly parsing of URLs + with only a host name, URLs containing fragment or query, and filenames + with only a UNC sharepoint on Windows. Based on patch by Dong-hee Na. + +- bpo-33775: Add 'default' and 'version' help text for localization in + argparse. + +Documentation +------------- + +- gh-issue-115399: Document CVE-2023-52425 of Expat <2.6.0 under "XML + vulnerabilities". + +- gh-issue-115233: Fix an example for :class:`~logging.LoggerAdapter` in the + Logging Cookbook. + +Tests +----- + +- gh-issue-83434: Disable JUnit XML output (``--junit-xml=FILE`` command + line option) in regrtest when hunting for reference leaks (``-R`` option). + Patch by Victor Stinner. + +- gh-issue-117187: Fix XML tests for vanilla Expat <2.6.0. + +- gh-issue-115979: Update test_importlib so that it passes under WASI SDK + 21. + +- gh-issue-116307: Added import helper ``isolated_modules`` as + ``CleanImport`` does not remove modules imported during the context. + +- gh-issue-115720: Leak tests (``-R``, ``--huntrleaks``) now show a summary + of the number of leaks found in each iteration. + +- gh-issue-115122: Add ``--bisect`` option to regrtest test runner: run + failed tests with ``test.bisect_cmd`` to identify failing tests. Patch by + Victor Stinner. + +- gh-issue-115596: Fix ``ProgramPriorityTests`` in ``test_os`` permanently + changing the process priority. + +- gh-issue-115198: Fix test_check_metadata_deprecate in distutils tests with + a newer Docutils. + +Build +----- + +- gh-issue-116313: Get WASI builds to work under wasmtime 18 w/ WASI + 0.2/preview2 primitives. + +- gh-issue-115167: Avoid vendoring ``vcruntime140_threads.dll`` when + building with Visual Studio 2022 version 17.8. + +Windows +------- + +- gh-issue-116773: Fix instances of ``<_overlapped.Overlapped object at + 0xXXX> still has pending operation at deallocation, the process may + crash``. + +- gh-issue-91227: Fix the asyncio ProactorEventLoop implementation so that + sending a datagram to an address that is not listening does not prevent + receiving any more datagrams. + +- gh-issue-115554: The installer now has more strict rules about updating + the :ref:`launcher`. In general, most users only have a single launcher + installed and will see no difference. When multiple launchers have been + installed, the option to install the launcher is disabled until all but + one have been removed. Downgrading the launcher (which was never allowed) + is now more obviously blocked. + +- gh-issue-115543: :ref:`launcher` can now detect Python 3.13 when installed + from the Microsoft Store, and will install Python 3.12 by default when + :envvar:`PYLAUNCHER_ALLOW_INSTALL` is set. + +- gh-issue-115009: Update Windows installer to use SQLite 3.45.1. 
+ +IDLE +---- + +- gh-issue-88516: On macOS show a proxy icon in the title bar of editor + windows to match platform behaviour. + +Tools/Demos +----------- + +- gh-issue-113516: Don't set ``LDSHARED`` when building for WASI. + +C API +----- + +- gh-issue-117021: Fix integer overflow in :c:func:`PyLong_AsPid` on + non-Windows 64-bit platforms. + + What's New in Python 3.11.8 final? ================================== @@ -7824,7 +8181,7 @@ provided by Yurii Karabas. - bpo-44633: Parameter substitution of the union type with wrong types now - raises ``TypeError`` instead of returning ``NotImplemented``. + raises ``TypeError`` instead of returning :data:`NotImplemented`. - bpo-44661: Update ``property_descr_set`` to use vectorcall if possible. Patch by Donghee Na. @@ -10672,7 +11029,7 @@ suite so we can run tests that are unrelated to :class:`ProcessPoolExecutor` on those platforms. -- bpo-38302: If :func:`object.__ipow__` returns :const:`NotImplemented`, the +- bpo-38302: If :func:`object.__ipow__` returns :data:`NotImplemented`, the operator will correctly fall back to :func:`object.__pow__` and :func:`object.__rpow__` as expected. diff -Nru python3.11-3.11.8/Modules/_collectionsmodule.c python3.11-3.11.9/Modules/_collectionsmodule.c --- python3.11-3.11.8/Modules/_collectionsmodule.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_collectionsmodule.c 2024-04-02 08:25:04.000000000 +0000 @@ -1084,8 +1084,9 @@ n = stop - i; while (--n >= 0) { CHECK_NOT_END(b); - item = b->data[index]; + item = Py_NewRef(b->data[index]); cmp = PyObject_RichCompareBool(item, v, Py_EQ); + Py_DECREF(item); if (cmp > 0) return PyLong_FromSsize_t(stop - n - 1); if (cmp < 0) diff -Nru python3.11-3.11.8/Modules/_csv.c python3.11-3.11.9/Modules/_csv.c --- python3.11-3.11.8/Modules/_csv.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_csv.c 2024-04-02 08:25:04.000000000 +0000 @@ -1109,6 +1109,8 @@ if (c == dialect->delimiter || c == dialect->escapechar || c == dialect->quotechar || + c == '\n' || + c == '\r' || PyUnicode_FindChar( dialect->lineterminator, c, 0, PyUnicode_GET_LENGTH(dialect->lineterminator), 1) >= 0) { @@ -1180,6 +1182,7 @@ static int join_append(WriterObj *self, PyObject *field, int quoted) { + DialectObj *dialect = self->dialect; unsigned int field_kind = -1; const void *field_data = NULL; Py_ssize_t field_len = 0; @@ -1192,6 +1195,15 @@ field_data = PyUnicode_DATA(field); field_len = PyUnicode_GET_LENGTH(field); } + if (!field_len && dialect->delimiter == ' ' && dialect->skipinitialspace) { + if (dialect->quoting == QUOTE_NONE) { + PyErr_Format(self->error_obj, + "empty field must be quoted if delimiter is a space " + "and skipinitialspace is true"); + return 0; + } + quoted = 1; + } rec_len = join_append_data(self, field_kind, field_data, field_len, "ed, 0); if (rec_len < 0) diff -Nru python3.11-3.11.8/Modules/_decimal/_decimal.c python3.11-3.11.9/Modules/_decimal/_decimal.c --- python3.11-3.11.8/Modules/_decimal/_decimal.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_decimal/_decimal.c 2024-04-02 08:25:04.000000000 +0000 @@ -144,6 +144,8 @@ static PyObject *basic_context_template = NULL; static PyObject *extended_context_template = NULL; +/* Invariant: NULL or pointer to _pydecimal.Decimal */ +static PyObject *PyDecimal = NULL; /* Error codes for functions that return signals or conditions */ #define DEC_INVALID_SIGNALS (MPD_Max_status+1U) @@ -3245,56 +3247,6 @@ return utf8; } -/* copy of libmpdec _mpd_round() */ -static void 
-_mpd_round(mpd_t *result, const mpd_t *a, mpd_ssize_t prec, - const mpd_context_t *ctx, uint32_t *status) -{ - mpd_ssize_t exp = a->exp + a->digits - prec; - - if (prec <= 0) { - mpd_seterror(result, MPD_Invalid_operation, status); - return; - } - if (mpd_isspecial(a) || mpd_iszero(a)) { - mpd_qcopy(result, a, status); - return; - } - - mpd_qrescale_fmt(result, a, exp, ctx, status); - if (result->digits > prec) { - mpd_qrescale_fmt(result, result, exp+1, ctx, status); - } -} - -/* Locate negative zero "z" option within a UTF-8 format spec string. - * Returns pointer to "z", else NULL. - * The portion of the spec we're working with is [[fill]align][sign][z] */ -static const char * -format_spec_z_search(char const *fmt, Py_ssize_t size) { - char const *pos = fmt; - char const *fmt_end = fmt + size; - /* skip over [[fill]align] (fill may be multi-byte character) */ - pos += 1; - while (pos < fmt_end && *pos & 0x80) { - pos += 1; - } - if (pos < fmt_end && strchr("<>=^", *pos) != NULL) { - pos += 1; - } else { - /* fill not present-- skip over [align] */ - pos = fmt; - if (pos < fmt_end && strchr("<>=^", *pos) != NULL) { - pos += 1; - } - } - /* skip over [sign] */ - if (pos < fmt_end && strchr("+- ", *pos) != NULL) { - pos += 1; - } - return pos < fmt_end && *pos == 'z' ? pos : NULL; -} - static int dict_get_item_string(PyObject *dict, const char *key, PyObject **valueobj, const char **valuestr) { @@ -3320,6 +3272,48 @@ return 0; } +/* + * Fallback _pydecimal formatting for new format specifiers that mpdecimal does + * not yet support. As documented, libmpdec follows the PEP-3101 format language: + * https://www.bytereef.org/mpdecimal/doc/libmpdec/assign-convert.html#to-string + */ +static PyObject * +pydec_format(PyObject *dec, PyObject *context, PyObject *fmt) +{ + PyObject *result; + PyObject *pydec; + PyObject *u; + + if (PyDecimal == NULL) { + PyDecimal = _PyImport_GetModuleAttrString("_pydecimal", "Decimal"); + if (PyDecimal == NULL) { + return NULL; + } + } + + u = dec_str(dec); + if (u == NULL) { + return NULL; + } + + pydec = PyObject_CallOneArg(PyDecimal, u); + Py_DECREF(u); + if (pydec == NULL) { + return NULL; + } + + result = PyObject_CallMethod(pydec, "__format__", "(OO)", fmt, context); + Py_DECREF(pydec); + + if (result == NULL && PyErr_ExceptionMatches(PyExc_ValueError)) { + /* Do not confuse users with the _pydecimal exception */ + PyErr_Clear(); + PyErr_SetString(PyExc_ValueError, "invalid format string"); + } + + return result; +} + /* Formatted representation of a PyDecObject. */ static PyObject * dec_format(PyObject *dec, PyObject *args) @@ -3332,16 +3326,11 @@ PyObject *fmtarg; PyObject *context; mpd_spec_t spec; - char const *fmt; - char *fmt_copy = NULL; + char *fmt; char *decstring = NULL; uint32_t status = 0; int replace_fillchar = 0; - int no_neg_0 = 0; Py_ssize_t size; - mpd_t *mpd = MPD(dec); - mpd_uint_t dt[MPD_MINALLOC_MAX]; - mpd_t tmp = {MPD_STATIC|MPD_STATIC_DATA,0,0,0,MPD_MINALLOC_MAX,dt}; CURRENT_CONTEXT(context); @@ -3350,39 +3339,20 @@ } if (PyUnicode_Check(fmtarg)) { - fmt = PyUnicode_AsUTF8AndSize(fmtarg, &size); + fmt = (char *)PyUnicode_AsUTF8AndSize(fmtarg, &size); if (fmt == NULL) { return NULL; } - /* NOTE: If https://github.com/python/cpython/pull/29438 lands, the - * format string manipulation below can be eliminated by enhancing - * the forked mpd_parse_fmt_str(). */ + if (size > 0 && fmt[0] == '\0') { /* NUL fill character: must be replaced with a valid UTF-8 char before calling mpd_parse_fmt_str(). 
*/ replace_fillchar = 1; - fmt = fmt_copy = dec_strdup(fmt, size); - if (fmt_copy == NULL) { + fmt = dec_strdup(fmt, size); + if (fmt == NULL) { return NULL; } - fmt_copy[0] = '_'; - } - /* Strip 'z' option, which isn't understood by mpd_parse_fmt_str(). - * NOTE: fmt is always null terminated by PyUnicode_AsUTF8AndSize() */ - char const *z_position = format_spec_z_search(fmt, size); - if (z_position != NULL) { - no_neg_0 = 1; - size_t z_index = z_position - fmt; - if (fmt_copy == NULL) { - fmt = fmt_copy = dec_strdup(fmt, size); - if (fmt_copy == NULL) { - return NULL; - } - } - /* Shift characters (including null terminator) left, - overwriting the 'z' option. */ - memmove(fmt_copy + z_index, fmt_copy + z_index + 1, size - z_index); - size -= 1; + fmt[0] = '_'; } } else { @@ -3392,10 +3362,13 @@ } if (!mpd_parse_fmt_str(&spec, fmt, CtxCaps(context))) { - PyErr_SetString(PyExc_ValueError, - "invalid format string"); - goto finish; + if (replace_fillchar) { + PyMem_Free(fmt); + } + + return pydec_format(dec, context, fmtarg); } + if (replace_fillchar) { /* In order to avoid clobbering parts of UTF-8 thousands separators or decimal points when the substitution is reversed later, the actual @@ -3448,45 +3421,8 @@ } } - if (no_neg_0 && mpd_isnegative(mpd) && !mpd_isspecial(mpd)) { - /* Round into a temporary (carefully mirroring the rounding - of mpd_qformat_spec()), and check if the result is negative zero. - If so, clear the sign and format the resulting positive zero. */ - mpd_ssize_t prec; - mpd_qcopy(&tmp, mpd, &status); - if (spec.prec >= 0) { - switch (spec.type) { - case 'f': - mpd_qrescale(&tmp, &tmp, -spec.prec, CTX(context), &status); - break; - case '%': - tmp.exp += 2; - mpd_qrescale(&tmp, &tmp, -spec.prec, CTX(context), &status); - break; - case 'g': - prec = (spec.prec == 0) ? 
1 : spec.prec; - if (tmp.digits > prec) { - _mpd_round(&tmp, &tmp, prec, CTX(context), &status); - } - break; - case 'e': - if (!mpd_iszero(&tmp)) { - _mpd_round(&tmp, &tmp, spec.prec+1, CTX(context), &status); - } - break; - } - } - if (status & MPD_Errors) { - PyErr_SetString(PyExc_ValueError, "unexpected error when rounding"); - goto finish; - } - if (mpd_iszero(&tmp)) { - mpd_set_positive(&tmp); - mpd = &tmp; - } - } - decstring = mpd_qformat_spec(mpd, &spec, CTX(context), &status); + decstring = mpd_qformat_spec(MPD(dec), &spec, CTX(context), &status); if (decstring == NULL) { if (status & MPD_Malloc_error) { PyErr_NoMemory(); @@ -3509,7 +3445,7 @@ Py_XDECREF(grouping); Py_XDECREF(sep); Py_XDECREF(dot); - if (fmt_copy) PyMem_Free(fmt_copy); + if (replace_fillchar) PyMem_Free(fmt); if (decstring) mpd_free(decstring); return result; } @@ -5944,6 +5880,8 @@ /* Create the module */ ASSIGN_PTR(m, PyModule_Create(&_decimal_module)); + /* For format specifiers not yet supported by libmpdec */ + PyDecimal = NULL; /* Add types to the module */ CHECK_INT(PyModule_AddObjectRef(m, "Decimal", (PyObject *)&PyDec_Type)); diff -Nru python3.11-3.11.8/Modules/_elementtree.c python3.11-3.11.9/Modules/_elementtree.c --- python3.11-3.11.8/Modules/_elementtree.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_elementtree.c 2024-04-02 08:25:04.000000000 +0000 @@ -3876,6 +3876,38 @@ } /*[clinic input] +_elementtree.XMLParser.flush + +[clinic start generated code]*/ + +static PyObject * +_elementtree_XMLParser_flush_impl(XMLParserObject *self) +/*[clinic end generated code: output=42fdb8795ca24509 input=effbecdb28715949]*/ +{ + if (!_check_xmlparser(self)) { + return NULL; + } + + if (EXPAT(SetReparseDeferralEnabled) == NULL) { + Py_RETURN_NONE; + } + + // NOTE: The Expat parser in the C implementation of ElementTree is not + // exposed to the outside; as a result we known that reparse deferral + // is currently enabled, or we would not even have access to function + // XML_SetReparseDeferralEnabled in the first place (which we checked + // for, a few lines up). 
+ + EXPAT(SetReparseDeferralEnabled)(self->parser, XML_FALSE); + + PyObject *res = expat_parse(self, "", 0, XML_FALSE); + + EXPAT(SetReparseDeferralEnabled)(self->parser, XML_TRUE); + + return res; +} + +/*[clinic input] _elementtree.XMLParser.feed data: object @@ -4302,6 +4334,7 @@ static PyMethodDef xmlparser_methods[] = { _ELEMENTTREE_XMLPARSER_FEED_METHODDEF _ELEMENTTREE_XMLPARSER_CLOSE_METHODDEF + _ELEMENTTREE_XMLPARSER_FLUSH_METHODDEF _ELEMENTTREE_XMLPARSER__PARSE_WHOLE_METHODDEF _ELEMENTTREE_XMLPARSER__SETEVENTS_METHODDEF {NULL, NULL} diff -Nru python3.11-3.11.8/Modules/_io/bufferedio.c python3.11-3.11.9/Modules/_io/bufferedio.c --- python3.11-3.11.8/Modules/_io/bufferedio.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_io/bufferedio.c 2024-04-02 08:25:04.000000000 +0000 @@ -929,6 +929,16 @@ Py_DECREF(res); return NULL; } + /* Flush the write buffer if necessary */ + if (self->writable) { + PyObject *r = buffered_flush_and_rewind_unlocked(self); + if (r == NULL) { + LEAVE_BUFFERED(self) + Py_DECREF(res); + return NULL; + } + Py_DECREF(r); + } _bufferedreader_reset_buf(self); r = _bufferedreader_raw_read(self, PyBytes_AS_STRING(res), n); LEAVE_BUFFERED(self) @@ -1186,7 +1196,11 @@ if (pos == -1) return NULL; pos -= RAW_OFFSET(self); - /* TODO: sanity check (pos >= 0) */ + + // GH-95782 + if (pos < 0) + pos = 0; + return PyLong_FromOff_t(pos); } @@ -1253,6 +1267,11 @@ offset = target; if (offset >= -self->pos && offset <= avail) { self->pos += offset; + + // GH-95782 + if (current - avail + offset < 0) + return PyLong_FromOff_t(0); + return PyLong_FromOff_t(current - avail + offset); } } diff -Nru python3.11-3.11.8/Modules/_multiprocessing/posixshmem.c python3.11-3.11.9/Modules/_multiprocessing/posixshmem.c --- python3.11-3.11.8/Modules/_multiprocessing/posixshmem.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_multiprocessing/posixshmem.c 2024-04-02 08:25:04.000000000 +0000 @@ -42,10 +42,15 @@ { int fd; int async_err = 0; - const char *name = PyUnicode_AsUTF8(path); + Py_ssize_t name_size; + const char *name = PyUnicode_AsUTF8AndSize(path, &name_size); if (name == NULL) { return -1; } + if (strlen(name) != (size_t)name_size) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + return -1; + } do { Py_BEGIN_ALLOW_THREADS fd = shm_open(name, flags, mode); @@ -81,10 +86,15 @@ { int rv; int async_err = 0; - const char *name = PyUnicode_AsUTF8(path); + Py_ssize_t name_size; + const char *name = PyUnicode_AsUTF8AndSize(path, &name_size); if (name == NULL) { return NULL; } + if (strlen(name) != (size_t)name_size) { + PyErr_SetString(PyExc_ValueError, "embedded null character"); + return NULL; + } do { Py_BEGIN_ALLOW_THREADS rv = shm_unlink(name); diff -Nru python3.11-3.11.8/Modules/_ssl.c python3.11-3.11.9/Modules/_ssl.c --- python3.11-3.11.8/Modules/_ssl.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_ssl.c 2024-04-02 08:25:04.000000000 +0000 @@ -3023,7 +3023,7 @@ /*[clinic end generated code: output=2cf0d7a0741b6bd1 input=8d58a805b95fc534]*/ { PySSLContext *self; - long options; + uint64_t options; const SSL_METHOD *method = NULL; SSL_CTX *ctx = NULL; X509_VERIFY_PARAM *params; @@ -3165,7 +3165,6 @@ result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL"); } if (result == 0) { - Py_DECREF(self); ERR_clear_error(); PyErr_SetString(get_state_ctx(self)->PySSLErrorObject, "No cipher can be selected."); @@ -3621,20 +3620,32 @@ static PyObject * get_options(PySSLContext *self, void *c) { - return 
PyLong_FromLong(SSL_CTX_get_options(self->ctx)); + uint64_t options = SSL_CTX_get_options(self->ctx); + Py_BUILD_ASSERT(sizeof(unsigned long long) >= sizeof(options)); + return PyLong_FromUnsignedLongLong(options); } static int set_options(PySSLContext *self, PyObject *arg, void *c) { - long new_opts, opts, set, clear; - long opt_no = ( + PyObject *new_opts_obj; + unsigned long long new_opts_arg; + uint64_t new_opts, opts, clear, set; + uint64_t opt_no = ( SSL_OP_NO_SSLv2 | SSL_OP_NO_SSLv3 | SSL_OP_NO_TLSv1 | SSL_OP_NO_TLSv1_1 | SSL_OP_NO_TLSv1_2 | SSL_OP_NO_TLSv1_3 ); - if (!PyArg_Parse(arg, "l", &new_opts)) + if (!PyArg_Parse(arg, "O!", &PyLong_Type, &new_opts_obj)) { return -1; + } + new_opts_arg = PyLong_AsUnsignedLongLong(new_opts_obj); + if (new_opts_arg == (unsigned long long)-1 && PyErr_Occurred()) { + return -1; + } + Py_BUILD_ASSERT(sizeof(new_opts) >= sizeof(new_opts_arg)); + new_opts = (uint64_t)new_opts_arg; + opts = SSL_CTX_get_options(self->ctx); clear = opts & ~new_opts; set = ~opts & new_opts; @@ -3648,8 +3659,9 @@ if (clear) { SSL_CTX_clear_options(self->ctx, clear); } - if (set) + if (set) { SSL_CTX_set_options(self->ctx, set); + } return 0; } @@ -4529,6 +4541,50 @@ return 0; } +#if OPENSSL_VERSION_NUMBER < 0x30300000L +static X509_OBJECT *x509_object_dup(const X509_OBJECT *obj) +{ + int ok; + X509_OBJECT *ret = X509_OBJECT_new(); + if (ret == NULL) { + return NULL; + } + switch (X509_OBJECT_get_type(obj)) { + case X509_LU_X509: + ok = X509_OBJECT_set1_X509(ret, X509_OBJECT_get0_X509(obj)); + break; + case X509_LU_CRL: + /* X509_OBJECT_get0_X509_CRL was not const-correct prior to 3.0.*/ + ok = X509_OBJECT_set1_X509_CRL( + ret, X509_OBJECT_get0_X509_CRL((X509_OBJECT *)obj)); + break; + default: + /* We cannot duplicate unrecognized types in a polyfill, but it is + * safe to leave an empty object. The caller will ignore it. */ + ok = 1; + break; + } + if (!ok) { + X509_OBJECT_free(ret); + return NULL; + } + return ret; +} + +static STACK_OF(X509_OBJECT) * +X509_STORE_get1_objects(X509_STORE *store) +{ + STACK_OF(X509_OBJECT) *ret; + if (!X509_STORE_lock(store)) { + return NULL; + } + ret = sk_X509_OBJECT_deep_copy(X509_STORE_get0_objects(store), + x509_object_dup, X509_OBJECT_free); + X509_STORE_unlock(store); + return ret; +} +#endif + PyDoc_STRVAR(PySSLContext_sni_callback_doc, "Set a callback that will be called when a server name is provided by the SSL/TLS client in the SNI extension.\n\ \n\ @@ -4558,7 +4614,12 @@ int x509 = 0, crl = 0, ca = 0, i; store = SSL_CTX_get_cert_store(self->ctx); - objs = X509_STORE_get0_objects(store); + objs = X509_STORE_get1_objects(store); + if (objs == NULL) { + PyErr_SetString(PyExc_MemoryError, "failed to query cert store"); + return NULL; + } + for (i = 0; i < sk_X509_OBJECT_num(objs); i++) { obj = sk_X509_OBJECT_value(objs, i); switch (X509_OBJECT_get_type(obj)) { @@ -4572,12 +4633,11 @@ crl++; break; default: - /* Ignore X509_LU_FAIL, X509_LU_RETRY, X509_LU_PKEY. - * As far as I can tell they are internal states and never - * stored in a cert store */ + /* Ignore unrecognized types. 
*/ break; } } + sk_X509_OBJECT_pop_free(objs, X509_OBJECT_free); return Py_BuildValue("{sisisi}", "x509", x509, "crl", crl, "x509_ca", ca); } @@ -4609,7 +4669,12 @@ } store = SSL_CTX_get_cert_store(self->ctx); - objs = X509_STORE_get0_objects(store); + objs = X509_STORE_get1_objects(store); + if (objs == NULL) { + PyErr_SetString(PyExc_MemoryError, "failed to query cert store"); + goto error; + } + for (i = 0; i < sk_X509_OBJECT_num(objs); i++) { X509_OBJECT *obj; X509 *cert; @@ -4637,9 +4702,11 @@ } Py_CLEAR(ci); } + sk_X509_OBJECT_pop_free(objs, X509_OBJECT_free); return rlist; error: + sk_X509_OBJECT_pop_free(objs, X509_OBJECT_free); Py_XDECREF(ci); Py_XDECREF(rlist); return NULL; @@ -5780,10 +5847,24 @@ return 0; } + +static int +sslmodule_add_option(PyObject *m, const char *name, uint64_t value) +{ + Py_BUILD_ASSERT(sizeof(unsigned long long) >= sizeof(value)); + PyObject *obj = PyLong_FromUnsignedLongLong(value); + if (obj == NULL) { + return -1; + } + int res = PyModule_AddObjectRef(m, name, obj); + Py_DECREF(obj); + return res; +} + + static int sslmodule_init_constants(PyObject *m) { - PyModule_AddStringConstant(m, "_DEFAULT_CIPHERS", PY_SSL_DEFAULT_CIPHER_STRING); @@ -5907,40 +5988,41 @@ PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_2", PY_SSL_VERSION_TLS1_2); +#define ADD_OPTION(NAME, VALUE) if (sslmodule_add_option(m, NAME, (VALUE)) < 0) return -1 + /* protocol options */ - PyModule_AddIntConstant(m, "OP_ALL", - SSL_OP_ALL & ~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS); - PyModule_AddIntConstant(m, "OP_NO_SSLv2", SSL_OP_NO_SSLv2); - PyModule_AddIntConstant(m, "OP_NO_SSLv3", SSL_OP_NO_SSLv3); - PyModule_AddIntConstant(m, "OP_NO_TLSv1", SSL_OP_NO_TLSv1); - PyModule_AddIntConstant(m, "OP_NO_TLSv1_1", SSL_OP_NO_TLSv1_1); - PyModule_AddIntConstant(m, "OP_NO_TLSv1_2", SSL_OP_NO_TLSv1_2); + ADD_OPTION("OP_ALL", SSL_OP_ALL & ~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS); + ADD_OPTION("OP_NO_SSLv2", SSL_OP_NO_SSLv2); + ADD_OPTION("OP_NO_SSLv3", SSL_OP_NO_SSLv3); + ADD_OPTION("OP_NO_TLSv1", SSL_OP_NO_TLSv1); + ADD_OPTION("OP_NO_TLSv1_1", SSL_OP_NO_TLSv1_1); + ADD_OPTION("OP_NO_TLSv1_2", SSL_OP_NO_TLSv1_2); #ifdef SSL_OP_NO_TLSv1_3 - PyModule_AddIntConstant(m, "OP_NO_TLSv1_3", SSL_OP_NO_TLSv1_3); + ADD_OPTION("OP_NO_TLSv1_3", SSL_OP_NO_TLSv1_3); #else - PyModule_AddIntConstant(m, "OP_NO_TLSv1_3", 0); + ADD_OPTION("OP_NO_TLSv1_3", 0); #endif - PyModule_AddIntConstant(m, "OP_CIPHER_SERVER_PREFERENCE", + ADD_OPTION("OP_CIPHER_SERVER_PREFERENCE", SSL_OP_CIPHER_SERVER_PREFERENCE); - PyModule_AddIntConstant(m, "OP_SINGLE_DH_USE", SSL_OP_SINGLE_DH_USE); - PyModule_AddIntConstant(m, "OP_NO_TICKET", SSL_OP_NO_TICKET); + ADD_OPTION("OP_SINGLE_DH_USE", SSL_OP_SINGLE_DH_USE); + ADD_OPTION("OP_NO_TICKET", SSL_OP_NO_TICKET); #ifdef SSL_OP_SINGLE_ECDH_USE - PyModule_AddIntConstant(m, "OP_SINGLE_ECDH_USE", SSL_OP_SINGLE_ECDH_USE); + ADD_OPTION("OP_SINGLE_ECDH_USE", SSL_OP_SINGLE_ECDH_USE); #endif #ifdef SSL_OP_NO_COMPRESSION - PyModule_AddIntConstant(m, "OP_NO_COMPRESSION", + ADD_OPTION("OP_NO_COMPRESSION", SSL_OP_NO_COMPRESSION); #endif #ifdef SSL_OP_ENABLE_MIDDLEBOX_COMPAT - PyModule_AddIntConstant(m, "OP_ENABLE_MIDDLEBOX_COMPAT", + ADD_OPTION("OP_ENABLE_MIDDLEBOX_COMPAT", SSL_OP_ENABLE_MIDDLEBOX_COMPAT); #endif #ifdef SSL_OP_NO_RENEGOTIATION - PyModule_AddIntConstant(m, "OP_NO_RENEGOTIATION", + ADD_OPTION("OP_NO_RENEGOTIATION", SSL_OP_NO_RENEGOTIATION); #endif #ifdef SSL_OP_IGNORE_UNEXPECTED_EOF - PyModule_AddIntConstant(m, "OP_IGNORE_UNEXPECTED_EOF", + ADD_OPTION("OP_IGNORE_UNEXPECTED_EOF", 
SSL_OP_IGNORE_UNEXPECTED_EOF); #endif diff -Nru python3.11-3.11.8/Modules/_testbuffer.c python3.11-3.11.9/Modules/_testbuffer.c --- python3.11-3.11.8/Modules/_testbuffer.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_testbuffer.c 2024-04-02 08:25:04.000000000 +0000 @@ -2825,70 +2825,91 @@ NULL }; - -PyMODINIT_FUNC -PyInit__testbuffer(void) +static int +_testbuffer_exec(PyObject *mod) { - PyObject *m; - - m = PyModule_Create(&_testbuffermodule); - if (m == NULL) - return NULL; - Py_SET_TYPE(&NDArray_Type, &PyType_Type); - Py_INCREF(&NDArray_Type); - PyModule_AddObject(m, "ndarray", (PyObject *)&NDArray_Type); + if (PyModule_AddType(mod, &NDArray_Type) < 0) { + return -1; + } Py_SET_TYPE(&StaticArray_Type, &PyType_Type); - Py_INCREF(&StaticArray_Type); - PyModule_AddObject(m, "staticarray", (PyObject *)&StaticArray_Type); + if (PyModule_AddType(mod, &StaticArray_Type) < 0) { + return -1; + } structmodule = PyImport_ImportModule("struct"); - if (structmodule == NULL) - return NULL; + if (structmodule == NULL) { + return -1; + } Struct = PyObject_GetAttrString(structmodule, "Struct"); + if (Struct == NULL) { + return -1; + } calcsize = PyObject_GetAttrString(structmodule, "calcsize"); - if (Struct == NULL || calcsize == NULL) - return NULL; + if (calcsize == NULL) { + return -1; + } simple_format = PyUnicode_FromString(simple_fmt); - if (simple_format == NULL) - return NULL; + if (simple_format == NULL) { + return -1; + } + +#define ADD_INT_MACRO(mod, macro) \ + do { \ + if (PyModule_AddIntConstant(mod, #macro, macro) < 0) { \ + return -1; \ + } \ + } while (0) + + ADD_INT_MACRO(mod, ND_MAX_NDIM); + ADD_INT_MACRO(mod, ND_VAREXPORT); + ADD_INT_MACRO(mod, ND_WRITABLE); + ADD_INT_MACRO(mod, ND_FORTRAN); + ADD_INT_MACRO(mod, ND_SCALAR); + ADD_INT_MACRO(mod, ND_PIL); + ADD_INT_MACRO(mod, ND_GETBUF_FAIL); + ADD_INT_MACRO(mod, ND_GETBUF_UNDEFINED); + ADD_INT_MACRO(mod, ND_REDIRECT); + + ADD_INT_MACRO(mod, PyBUF_SIMPLE); + ADD_INT_MACRO(mod, PyBUF_WRITABLE); + ADD_INT_MACRO(mod, PyBUF_FORMAT); + ADD_INT_MACRO(mod, PyBUF_ND); + ADD_INT_MACRO(mod, PyBUF_STRIDES); + ADD_INT_MACRO(mod, PyBUF_INDIRECT); + ADD_INT_MACRO(mod, PyBUF_C_CONTIGUOUS); + ADD_INT_MACRO(mod, PyBUF_F_CONTIGUOUS); + ADD_INT_MACRO(mod, PyBUF_ANY_CONTIGUOUS); + ADD_INT_MACRO(mod, PyBUF_FULL); + ADD_INT_MACRO(mod, PyBUF_FULL_RO); + ADD_INT_MACRO(mod, PyBUF_RECORDS); + ADD_INT_MACRO(mod, PyBUF_RECORDS_RO); + ADD_INT_MACRO(mod, PyBUF_STRIDED); + ADD_INT_MACRO(mod, PyBUF_STRIDED_RO); + ADD_INT_MACRO(mod, PyBUF_CONTIG); + ADD_INT_MACRO(mod, PyBUF_CONTIG_RO); - PyModule_AddIntMacro(m, ND_MAX_NDIM); - PyModule_AddIntMacro(m, ND_VAREXPORT); - PyModule_AddIntMacro(m, ND_WRITABLE); - PyModule_AddIntMacro(m, ND_FORTRAN); - PyModule_AddIntMacro(m, ND_SCALAR); - PyModule_AddIntMacro(m, ND_PIL); - PyModule_AddIntMacro(m, ND_GETBUF_FAIL); - PyModule_AddIntMacro(m, ND_GETBUF_UNDEFINED); - PyModule_AddIntMacro(m, ND_REDIRECT); - - PyModule_AddIntMacro(m, PyBUF_SIMPLE); - PyModule_AddIntMacro(m, PyBUF_WRITABLE); - PyModule_AddIntMacro(m, PyBUF_FORMAT); - PyModule_AddIntMacro(m, PyBUF_ND); - PyModule_AddIntMacro(m, PyBUF_STRIDES); - PyModule_AddIntMacro(m, PyBUF_INDIRECT); - PyModule_AddIntMacro(m, PyBUF_C_CONTIGUOUS); - PyModule_AddIntMacro(m, PyBUF_F_CONTIGUOUS); - PyModule_AddIntMacro(m, PyBUF_ANY_CONTIGUOUS); - PyModule_AddIntMacro(m, PyBUF_FULL); - PyModule_AddIntMacro(m, PyBUF_FULL_RO); - PyModule_AddIntMacro(m, PyBUF_RECORDS); - PyModule_AddIntMacro(m, PyBUF_RECORDS_RO); - PyModule_AddIntMacro(m, PyBUF_STRIDED); - 
PyModule_AddIntMacro(m, PyBUF_STRIDED_RO); - PyModule_AddIntMacro(m, PyBUF_CONTIG); - PyModule_AddIntMacro(m, PyBUF_CONTIG_RO); + ADD_INT_MACRO(mod, PyBUF_READ); + ADD_INT_MACRO(mod, PyBUF_WRITE); - PyModule_AddIntMacro(m, PyBUF_READ); - PyModule_AddIntMacro(m, PyBUF_WRITE); +#undef ADD_INT_MACRO - return m; + return 0; } - - +PyMODINIT_FUNC +PyInit__testbuffer(void) +{ + PyObject *mod = PyModule_Create(&_testbuffermodule); + if (mod == NULL) { + return NULL; + } + if (_testbuffer_exec(mod) < 0) { + Py_DECREF(mod); + return NULL; + } + return mod; +} diff -Nru python3.11-3.11.8/Modules/_testcapimodule.c python3.11-3.11.9/Modules/_testcapimodule.c --- python3.11-3.11.8/Modules/_testcapimodule.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/_testcapimodule.c 2024-04-02 08:25:04.000000000 +0000 @@ -8221,6 +8221,7 @@ PyModule_AddObject(m, "PY_SSIZE_T_MAX", PyLong_FromSsize_t(PY_SSIZE_T_MAX)); PyModule_AddObject(m, "PY_SSIZE_T_MIN", PyLong_FromSsize_t(PY_SSIZE_T_MIN)); PyModule_AddObject(m, "SIZEOF_TIME_T", PyLong_FromSsize_t(sizeof(time_t))); + PyModule_AddObject(m, "SIZEOF_PID_T", PyLong_FromSsize_t(sizeof(pid_t))); PyModule_AddObject(m, "Py_Version", PyLong_FromUnsignedLong(Py_Version)); Py_INCREF(&PyInstanceMethod_Type); PyModule_AddObject(m, "instancemethod", (PyObject *)&PyInstanceMethod_Type); diff -Nru python3.11-3.11.8/Modules/arraymodule.c python3.11-3.11.9/Modules/arraymodule.c --- python3.11-3.11.8/Modules/arraymodule.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/arraymodule.c 2024-04-02 08:25:04.000000000 +0000 @@ -244,7 +244,7 @@ if (!PyArg_Parse(v, "b;array item must be integer", &x)) return -1; if (i >= 0) - ((char *)ap->ob_item)[i] = x; + ((unsigned char *)ap->ob_item)[i] = x; return 0; } diff -Nru python3.11-3.11.8/Modules/clinic/_elementtree.c.h python3.11-3.11.9/Modules/clinic/_elementtree.c.h --- python3.11-3.11.8/Modules/clinic/_elementtree.c.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/clinic/_elementtree.c.h 2024-04-02 08:25:04.000000000 +0000 @@ -865,6 +865,23 @@ return _elementtree_XMLParser_close_impl(self); } +PyDoc_STRVAR(_elementtree_XMLParser_flush__doc__, +"flush($self, /)\n" +"--\n" +"\n"); + +#define _ELEMENTTREE_XMLPARSER_FLUSH_METHODDEF \ + {"flush", (PyCFunction)_elementtree_XMLParser_flush, METH_NOARGS, _elementtree_XMLParser_flush__doc__}, + +static PyObject * +_elementtree_XMLParser_flush_impl(XMLParserObject *self); + +static PyObject * +_elementtree_XMLParser_flush(XMLParserObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _elementtree_XMLParser_flush_impl(self); +} + PyDoc_STRVAR(_elementtree_XMLParser_feed__doc__, "feed($self, data, /)\n" "--\n" @@ -915,4 +932,4 @@ exit: return return_value; } -/*[clinic end generated code: output=3fd6fa2ce1aeca76 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=88a3c8b7164a6474 input=a9049054013a1b77]*/ diff -Nru python3.11-3.11.8/Modules/clinic/pyexpat.c.h python3.11-3.11.9/Modules/clinic/pyexpat.c.h --- python3.11-3.11.8/Modules/clinic/pyexpat.c.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/clinic/pyexpat.c.h 2024-04-02 08:25:04.000000000 +0000 @@ -2,6 +2,53 @@ preserve [clinic start generated code]*/ +PyDoc_STRVAR(pyexpat_xmlparser_SetReparseDeferralEnabled__doc__, +"SetReparseDeferralEnabled($self, enabled, /)\n" +"--\n" +"\n" +"Enable/Disable reparse deferral; enabled by default with Expat >=2.6.0."); + +#define PYEXPAT_XMLPARSER_SETREPARSEDEFERRALENABLED_METHODDEF \ + {"SetReparseDeferralEnabled", 
(PyCFunction)pyexpat_xmlparser_SetReparseDeferralEnabled, METH_O, pyexpat_xmlparser_SetReparseDeferralEnabled__doc__}, + +static PyObject * +pyexpat_xmlparser_SetReparseDeferralEnabled_impl(xmlparseobject *self, + int enabled); + +static PyObject * +pyexpat_xmlparser_SetReparseDeferralEnabled(xmlparseobject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + int enabled; + + enabled = PyObject_IsTrue(arg); + if (enabled < 0) { + goto exit; + } + return_value = pyexpat_xmlparser_SetReparseDeferralEnabled_impl(self, enabled); + +exit: + return return_value; +} + +PyDoc_STRVAR(pyexpat_xmlparser_GetReparseDeferralEnabled__doc__, +"GetReparseDeferralEnabled($self, /)\n" +"--\n" +"\n" +"Retrieve reparse deferral enabled status; always returns false with Expat <2.6.0."); + +#define PYEXPAT_XMLPARSER_GETREPARSEDEFERRALENABLED_METHODDEF \ + {"GetReparseDeferralEnabled", (PyCFunction)pyexpat_xmlparser_GetReparseDeferralEnabled, METH_NOARGS, pyexpat_xmlparser_GetReparseDeferralEnabled__doc__}, + +static PyObject * +pyexpat_xmlparser_GetReparseDeferralEnabled_impl(xmlparseobject *self); + +static PyObject * +pyexpat_xmlparser_GetReparseDeferralEnabled(xmlparseobject *self, PyObject *Py_UNUSED(ignored)) +{ + return pyexpat_xmlparser_GetReparseDeferralEnabled_impl(self); +} + PyDoc_STRVAR(pyexpat_xmlparser_Parse__doc__, "Parse($self, data, isfinal=False, /)\n" "--\n" @@ -425,4 +472,4 @@ #ifndef PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #define PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #endif /* !defined(PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF) */ -/*[clinic end generated code: output=3e333b89da3aa58c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8d544b917e35add6 input=a9049054013a1b77]*/ diff -Nru python3.11-3.11.8/Modules/expat/expat.h python3.11-3.11.9/Modules/expat/expat.h --- python3.11-3.11.8/Modules/expat/expat.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/expat.h 2024-04-02 08:25:04.000000000 +0000 @@ -11,11 +11,13 @@ Copyright (c) 2000-2005 Fred L. Drake, Jr. Copyright (c) 2001-2002 Greg Stein Copyright (c) 2002-2016 Karl Waclawek - Copyright (c) 2016-2022 Sebastian Pipping + Copyright (c) 2016-2024 Sebastian Pipping Copyright (c) 2016 Cristian Rodríguez Copyright (c) 2016 Thomas Beutlich Copyright (c) 2017 Rhodri James Copyright (c) 2022 Thijs Schreijer + Copyright (c) 2023 Hanno Böck + Copyright (c) 2023 Sony Corporation / Snild Dolkow Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -269,7 +271,7 @@ const XML_Memory_Handling_Suite *memsuite, const XML_Char *namespaceSeparator); -/* Prepare a parser object to be re-used. This is particularly +/* Prepare a parser object to be reused. This is particularly valuable when memory allocation overhead is disproportionately high, such as when a large number of small documnents need to be parsed. All handlers are cleared from the parser, except for the @@ -951,7 +953,7 @@ XMLPARSEAPI(int) XML_GetCurrentByteCount(XML_Parser parser); -/* If XML_CONTEXT_BYTES is defined, returns the input buffer, sets +/* If XML_CONTEXT_BYTES is >=1, returns the input buffer, sets the integer pointed to by offset to the offset within this buffer of the current parse position, and sets the integer pointed to by size to the size of this buffer (the number of input bytes). Otherwise @@ -1025,7 +1027,9 @@ XML_FEATURE_ATTR_INFO, /* Added in Expat 2.4.0. 
*/ XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_MAXIMUM_AMPLIFICATION_DEFAULT, - XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT + XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT, + /* Added in Expat 2.6.0. */ + XML_FEATURE_GE /* Additional features must be added to the end of this enum. */ }; @@ -1038,23 +1042,29 @@ XMLPARSEAPI(const XML_Feature *) XML_GetFeatureList(void); -#ifdef XML_DTD -/* Added in Expat 2.4.0. */ +#if XML_GE == 1 +/* Added in Expat 2.4.0 for XML_DTD defined and + * added in Expat 2.6.0 for XML_GE == 1. */ XMLPARSEAPI(XML_Bool) XML_SetBillionLaughsAttackProtectionMaximumAmplification( XML_Parser parser, float maximumAmplificationFactor); -/* Added in Expat 2.4.0. */ +/* Added in Expat 2.4.0 for XML_DTD defined and + * added in Expat 2.6.0 for XML_GE == 1. */ XMLPARSEAPI(XML_Bool) XML_SetBillionLaughsAttackProtectionActivationThreshold( XML_Parser parser, unsigned long long activationThresholdBytes); #endif +/* Added in Expat 2.6.0. */ +XMLPARSEAPI(XML_Bool) +XML_SetReparseDeferralEnabled(XML_Parser parser, XML_Bool enabled); + /* Expat follows the semantic versioning convention. - See http://semver.org. + See https://semver.org */ #define XML_MAJOR_VERSION 2 -#define XML_MINOR_VERSION 5 +#define XML_MINOR_VERSION 6 #define XML_MICRO_VERSION 0 #ifdef __cplusplus diff -Nru python3.11-3.11.8/Modules/expat/expat_config.h python3.11-3.11.9/Modules/expat/expat_config.h --- python3.11-3.11.8/Modules/expat/expat_config.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/expat_config.h 2024-04-02 08:25:04.000000000 +0000 @@ -16,6 +16,7 @@ #define XML_NS 1 #define XML_DTD 1 +#define XML_GE 1 #define XML_CONTEXT_BYTES 1024 // bpo-30947: Python uses best available entropy sources to diff -Nru python3.11-3.11.8/Modules/expat/internal.h python3.11-3.11.9/Modules/expat/internal.h --- python3.11-3.11.8/Modules/expat/internal.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/internal.h 2024-04-02 08:25:04.000000000 +0000 @@ -28,9 +28,10 @@ Copyright (c) 2002-2003 Fred L. Drake, Jr. 
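The expat.h hunk above also bumps the bundled Expat to 2.6.0 and adds XML_FEATURE_GE to the feature list, so embedding code can discover at run time whether general-entity processing was compiled in. A small consumer-side sketch, assuming a program built against the 2.6.0 header shown here; the function name is hypothetical.

#include <expat.h>

static int
expat_has_general_entities(void)
{
    /* XML_GetFeatureList() returns an XML_FEATURE_END-terminated array. */
    const XML_Feature *f = XML_GetFeatureList();
    for (; f != NULL && f->feature != XML_FEATURE_END; f++) {
        if (f->feature == XML_FEATURE_GE) {
            return 1;
        }
    }
    return 0;  /* built with XML_GE == 0, or an Expat older than 2.6.0 */
}

A compile-time check against XML_MAJOR_VERSION and XML_MINOR_VERSION is enough when only the header vintage matters.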
Copyright (c) 2002-2006 Karl Waclawek Copyright (c) 2003 Greg Stein - Copyright (c) 2016-2022 Sebastian Pipping + Copyright (c) 2016-2023 Sebastian Pipping Copyright (c) 2018 Yury Gribov Copyright (c) 2019 David Loffredo + Copyright (c) 2023 Sony Corporation / Snild Dolkow Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -154,12 +155,15 @@ void _INTERNAL_trim_to_complete_utf8_characters(const char *from, const char **fromLimRef); -#if defined(XML_DTD) +#if XML_GE == 1 unsigned long long testingAccountingGetCountBytesDirect(XML_Parser parser); unsigned long long testingAccountingGetCountBytesIndirect(XML_Parser parser); const char *unsignedCharToPrintable(unsigned char c); #endif +extern XML_Bool g_reparseDeferralEnabledDefault; // written ONLY in runtests.c +extern unsigned int g_parseAttempts; // used for testing only + #ifdef __cplusplus } #endif diff -Nru python3.11-3.11.8/Modules/expat/pyexpatns.h python3.11-3.11.9/Modules/expat/pyexpatns.h --- python3.11-3.11.8/Modules/expat/pyexpatns.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/pyexpatns.h 2024-04-02 08:25:04.000000000 +0000 @@ -108,6 +108,7 @@ #define XML_SetNotStandaloneHandler PyExpat_XML_SetNotStandaloneHandler #define XML_SetParamEntityParsing PyExpat_XML_SetParamEntityParsing #define XML_SetProcessingInstructionHandler PyExpat_XML_SetProcessingInstructionHandler +#define XML_SetReparseDeferralEnabled PyExpat_XML_SetReparseDeferralEnabled #define XML_SetReturnNSTriplet PyExpat_XML_SetReturnNSTriplet #define XML_SetSkippedEntityHandler PyExpat_XML_SetSkippedEntityHandler #define XML_SetStartCdataSectionHandler PyExpat_XML_SetStartCdataSectionHandler diff -Nru python3.11-3.11.8/Modules/expat/siphash.h python3.11-3.11.9/Modules/expat/siphash.h --- python3.11-3.11.8/Modules/expat/siphash.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/siphash.h 2024-04-02 08:25:04.000000000 +0000 @@ -106,7 +106,7 @@ * if this code is included and compiled as C++; related GCC warning is: * warning: use of C++11 long long integer constant [-Wlong-long] */ -#define _SIP_ULL(high, low) ((((uint64_t)high) << 32) | (low)) +#define SIP_ULL(high, low) ((((uint64_t)high) << 32) | (low)) #define SIP_ROTL(x, b) (uint64_t)(((x) << (b)) | ((x) >> (64 - (b)))) @@ -190,10 +190,10 @@ static struct siphash * sip24_init(struct siphash *H, const struct sipkey *key) { - H->v0 = _SIP_ULL(0x736f6d65U, 0x70736575U) ^ key->k[0]; - H->v1 = _SIP_ULL(0x646f7261U, 0x6e646f6dU) ^ key->k[1]; - H->v2 = _SIP_ULL(0x6c796765U, 0x6e657261U) ^ key->k[0]; - H->v3 = _SIP_ULL(0x74656462U, 0x79746573U) ^ key->k[1]; + H->v0 = SIP_ULL(0x736f6d65U, 0x70736575U) ^ key->k[0]; + H->v1 = SIP_ULL(0x646f7261U, 0x6e646f6dU) ^ key->k[1]; + H->v2 = SIP_ULL(0x6c796765U, 0x6e657261U) ^ key->k[0]; + H->v3 = SIP_ULL(0x74656462U, 0x79746573U) ^ key->k[1]; H->p = H->buf; H->c = 0; diff -Nru python3.11-3.11.8/Modules/expat/winconfig.h python3.11-3.11.9/Modules/expat/winconfig.h --- python3.11-3.11.8/Modules/expat/winconfig.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/winconfig.h 2024-04-02 08:25:04.000000000 +0000 @@ -9,7 +9,8 @@ Copyright (c) 2000 Clark Cooper Copyright (c) 2002 Greg Stein Copyright (c) 2005 Karl Waclawek - Copyright (c) 2017-2021 Sebastian Pipping + Copyright (c) 2017-2023 Sebastian Pipping + Copyright (c) 2023 Orgad Shaneh Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -35,7 +36,9 @@ #ifndef 
WINCONFIG_H #define WINCONFIG_H -#define WIN32_LEAN_AND_MEAN +#ifndef WIN32_LEAN_AND_MEAN +# define WIN32_LEAN_AND_MEAN +#endif #include #undef WIN32_LEAN_AND_MEAN diff -Nru python3.11-3.11.8/Modules/expat/xmlparse.c python3.11-3.11.9/Modules/expat/xmlparse.c --- python3.11-3.11.8/Modules/expat/xmlparse.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/xmlparse.c 2024-04-02 08:25:04.000000000 +0000 @@ -1,4 +1,4 @@ -/* 5ab094ffadd6edfc94c3eee53af44a86951f9f1f0933ada3114bbce2bfb02c99 (2.5.0+) +/* 628e24d4966bedbd4800f6ed128d06d29703765b4bce12d3b7f099f90f842fc9 (2.6.0+) __ __ _ ___\ \/ /_ __ __ _| |_ / _ \\ /| '_ \ / _` | __| @@ -13,7 +13,7 @@ Copyright (c) 2002-2016 Karl Waclawek Copyright (c) 2005-2009 Steven Solie Copyright (c) 2016 Eric Rahm - Copyright (c) 2016-2022 Sebastian Pipping + Copyright (c) 2016-2024 Sebastian Pipping Copyright (c) 2016 Gaurav Copyright (c) 2016 Thomas Beutlich Copyright (c) 2016 Gustavo Grieco @@ -32,10 +32,13 @@ Copyright (c) 2019 David Loffredo Copyright (c) 2019-2020 Ben Wagner Copyright (c) 2019 Vadim Zeitlin - Copyright (c) 2021 Dong-hee Na + Copyright (c) 2021 Donghee Na Copyright (c) 2022 Samanta Navarro Copyright (c) 2022 Jeffrey Walton Copyright (c) 2022 Jann Horn + Copyright (c) 2022 Sean McBride + Copyright (c) 2023 Owain Davies + Copyright (c) 2023 Sony Corporation / Snild Dolkow Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -60,10 +63,25 @@ #define XML_BUILDING_EXPAT 1 -#include +#include "expat_config.h" -#if ! defined(_GNU_SOURCE) -# define _GNU_SOURCE 1 /* syscall prototype */ +#if ! defined(XML_GE) || (1 - XML_GE - 1 == 2) || (XML_GE < 0) || (XML_GE > 1) +# error XML_GE (for general entities) must be defined, non-empty, either 1 or 0 (0 to disable, 1 to enable; 1 is a common default) +#endif + +#if defined(XML_DTD) && XML_GE == 0 +# error Either undefine XML_DTD or define XML_GE to 1. +#endif + +#if ! defined(XML_CONTEXT_BYTES) || (1 - XML_CONTEXT_BYTES - 1 == 2) \ + || (XML_CONTEXT_BYTES + 0 < 0) +# error XML_CONTEXT_BYTES must be defined, non-empty and >=0 (0 to disable, >=1 to enable; 1024 is a common default) +#endif + +#if defined(HAVE_SYSCALL_GETRANDOM) +# if ! defined(_GNU_SOURCE) +# define _GNU_SOURCE 1 /* syscall prototype */ +# endif #endif #ifdef _WIN32 @@ -73,6 +91,7 @@ # endif #endif +#include #include #include /* memset(), memcpy() */ #include @@ -131,8 +150,8 @@ Your options include: \ * Linux >=3.17 + glibc >=2.25 (getrandom): HAVE_GETRANDOM, \ * Linux >=3.17 + glibc (including <2.25) (syscall SYS_getrandom): HAVE_SYSCALL_GETRANDOM, \ - * BSD / macOS >=10.7 (arc4random_buf): HAVE_ARC4RANDOM_BUF, \ - * BSD / macOS (including <10.7) (arc4random): HAVE_ARC4RANDOM, \ + * BSD / macOS >=10.7 / glibc >=2.36 (arc4random_buf): HAVE_ARC4RANDOM_BUF, \ + * BSD / macOS (including <10.7) / glibc >=2.36 (arc4random): HAVE_ARC4RANDOM, \ * libbsd (arc4random_buf): HAVE_ARC4RANDOM_BUF + HAVE_LIBBSD, \ * libbsd (arc4random): HAVE_ARC4RANDOM + HAVE_LIBBSD, \ * Linux (including <3.17) / BSD / macOS (including <10.7) / Solaris >=8 (/dev/urandom): XML_DEV_URANDOM, \ @@ -196,6 +215,8 @@ /* Do safe (NULL-aware) pointer arithmetic */ #define EXPAT_SAFE_PTR_DIFF(p, q) (((p) && (q)) ? ((p) - (q)) : 0) +#define EXPAT_MIN(a, b) (((a) < (b)) ? (a) : (b)) + #include "internal.h" #include "xmltok.h" #include "xmlrole.h" @@ -279,7 +300,7 @@ XML_Parse()/XML_ParseBuffer(), the buffer is re-allocated to contain the 'raw' name as well. 
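The new preprocessor guards in xmlparse.c above deserve a note, because the expression (1 - XML_GE - 1 == 2) looks odd at first sight: it exists to catch a macro that is defined but expands to nothing, for example a stray -DXML_GE= on the compiler command line. A standalone illustration, not part of the patch:

#define GOOD  1
#define EMPTY       /* defined, but expands to nothing */

#if (1 - GOOD - 1 == 2)
#  error "not taken: 1 - 1 - 1 evaluates to -1"
#endif

#if (1 - EMPTY - 1 == 2)
/* taken: the empty expansion leaves "1 - - 1", which is 1 - (-1) == 2,
 * so a defined-but-empty macro is detected purely at preprocessing time */
#endif

The companion conditions (XML_GE < 0, XML_GE > 1, and the matching test for XML_CONTEXT_BYTES) then reject out-of-range numeric values.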
- A parser re-uses these structures, maintaining a list of allocated + A parser reuses these structures, maintaining a list of allocated TAG objects in a free list. */ typedef struct tag { @@ -408,12 +429,12 @@ XML_ACCOUNT_NONE /* i.e. do not account, was accounted already */ }; -#ifdef XML_DTD +#if XML_GE == 1 typedef unsigned long long XmlBigCount; typedef struct accounting { XmlBigCount countBytesDirect; XmlBigCount countBytesIndirect; - int debugLevel; + unsigned long debugLevel; float maximumAmplificationFactor; // >=1.0 unsigned long long activationThresholdBytes; } ACCOUNTING; @@ -422,9 +443,9 @@ unsigned int countEverOpened; unsigned int currentDepth; unsigned int maximumDepthSeen; - int debugLevel; + unsigned long debugLevel; } ENTITY_STATS; -#endif /* XML_DTD */ +#endif /* XML_GE == 1 */ typedef enum XML_Error PTRCALL Processor(XML_Parser parser, const char *start, const char *end, const char **endPtr); @@ -464,41 +485,47 @@ const ENCODING *enc, const char *start, const char *end, const char **endPtr, XML_Bool haveMore, enum XML_Account account); -static enum XML_Error doCdataSection(XML_Parser parser, const ENCODING *, +static enum XML_Error doCdataSection(XML_Parser parser, const ENCODING *enc, const char **startPtr, const char *end, const char **nextPtr, XML_Bool haveMore, enum XML_Account account); #ifdef XML_DTD -static enum XML_Error doIgnoreSection(XML_Parser parser, const ENCODING *, +static enum XML_Error doIgnoreSection(XML_Parser parser, const ENCODING *enc, const char **startPtr, const char *end, const char **nextPtr, XML_Bool haveMore); #endif /* XML_DTD */ static void freeBindings(XML_Parser parser, BINDING *bindings); -static enum XML_Error storeAtts(XML_Parser parser, const ENCODING *, - const char *s, TAG_NAME *tagNamePtr, +static enum XML_Error storeAtts(XML_Parser parser, const ENCODING *enc, + const char *attStr, TAG_NAME *tagNamePtr, BINDING **bindingsPtr, enum XML_Account account); static enum XML_Error addBinding(XML_Parser parser, PREFIX *prefix, const ATTRIBUTE_ID *attId, const XML_Char *uri, BINDING **bindingsPtr); -static int defineAttribute(ELEMENT_TYPE *type, ATTRIBUTE_ID *, XML_Bool isCdata, - XML_Bool isId, const XML_Char *dfltValue, - XML_Parser parser); -static enum XML_Error storeAttributeValue(XML_Parser parser, const ENCODING *, - XML_Bool isCdata, const char *, - const char *, STRING_POOL *, +static int defineAttribute(ELEMENT_TYPE *type, ATTRIBUTE_ID *attId, + XML_Bool isCdata, XML_Bool isId, + const XML_Char *value, XML_Parser parser); +static enum XML_Error storeAttributeValue(XML_Parser parser, + const ENCODING *enc, XML_Bool isCdata, + const char *ptr, const char *end, + STRING_POOL *pool, enum XML_Account account); -static enum XML_Error appendAttributeValue(XML_Parser parser, const ENCODING *, - XML_Bool isCdata, const char *, - const char *, STRING_POOL *, +static enum XML_Error appendAttributeValue(XML_Parser parser, + const ENCODING *enc, + XML_Bool isCdata, const char *ptr, + const char *end, STRING_POOL *pool, enum XML_Account account); static ATTRIBUTE_ID *getAttributeId(XML_Parser parser, const ENCODING *enc, const char *start, const char *end); -static int setElementTypePrefix(XML_Parser parser, ELEMENT_TYPE *); +static int setElementTypePrefix(XML_Parser parser, ELEMENT_TYPE *elementType); +#if XML_GE == 1 static enum XML_Error storeEntityValue(XML_Parser parser, const ENCODING *enc, const char *start, const char *end, enum XML_Account account); +#else +static enum XML_Error storeSelfEntityValue(XML_Parser parser, ENTITY 
*entity); +#endif static int reportProcessingInstruction(XML_Parser parser, const ENCODING *enc, const char *start, const char *end); static int reportComment(XML_Parser parser, const ENCODING *enc, @@ -518,21 +545,22 @@ const XML_Memory_Handling_Suite *ms); static int dtdCopy(XML_Parser oldParser, DTD *newDtd, const DTD *oldDtd, const XML_Memory_Handling_Suite *ms); -static int copyEntityTable(XML_Parser oldParser, HASH_TABLE *, STRING_POOL *, - const HASH_TABLE *); +static int copyEntityTable(XML_Parser oldParser, HASH_TABLE *newTable, + STRING_POOL *newPool, const HASH_TABLE *oldTable); static NAMED *lookup(XML_Parser parser, HASH_TABLE *table, KEY name, size_t createSize); -static void FASTCALL hashTableInit(HASH_TABLE *, +static void FASTCALL hashTableInit(HASH_TABLE *table, const XML_Memory_Handling_Suite *ms); -static void FASTCALL hashTableClear(HASH_TABLE *); -static void FASTCALL hashTableDestroy(HASH_TABLE *); -static void FASTCALL hashTableIterInit(HASH_TABLE_ITER *, const HASH_TABLE *); -static NAMED *FASTCALL hashTableIterNext(HASH_TABLE_ITER *); +static void FASTCALL hashTableClear(HASH_TABLE *table); +static void FASTCALL hashTableDestroy(HASH_TABLE *table); +static void FASTCALL hashTableIterInit(HASH_TABLE_ITER *iter, + const HASH_TABLE *table); +static NAMED *FASTCALL hashTableIterNext(HASH_TABLE_ITER *iter); -static void FASTCALL poolInit(STRING_POOL *, +static void FASTCALL poolInit(STRING_POOL *pool, const XML_Memory_Handling_Suite *ms); -static void FASTCALL poolClear(STRING_POOL *); -static void FASTCALL poolDestroy(STRING_POOL *); +static void FASTCALL poolClear(STRING_POOL *pool); +static void FASTCALL poolDestroy(STRING_POOL *pool); static XML_Char *poolAppend(STRING_POOL *pool, const ENCODING *enc, const char *ptr, const char *end); static XML_Char *poolStoreString(STRING_POOL *pool, const ENCODING *enc, @@ -562,7 +590,7 @@ static void parserInit(XML_Parser parser, const XML_Char *encodingName); -#ifdef XML_DTD +#if XML_GE == 1 static float accountingGetCurrentAmplification(XML_Parser rootParser); static void accountingReportStats(XML_Parser originParser, const char *epilog); static void accountingOnAbort(XML_Parser originParser); @@ -585,13 +613,12 @@ static XML_Parser getRootParserOf(XML_Parser parser, unsigned int *outLevelDiff); -#endif /* XML_DTD */ +#endif /* XML_GE == 1 */ static unsigned long getDebugLevel(const char *variableName, unsigned long defaultDebugLevel); #define poolStart(pool) ((pool)->start) -#define poolEnd(pool) ((pool)->ptr) #define poolLength(pool) ((pool)->ptr - (pool)->start) #define poolChop(pool) ((void)--(pool->ptr)) #define poolLastChar(pool) (((pool)->ptr)[-1]) @@ -602,21 +629,35 @@ ? 0 \ : ((*((pool)->ptr)++ = c), 1)) +XML_Bool g_reparseDeferralEnabledDefault = XML_TRUE; // write ONLY in runtests.c +unsigned int g_parseAttempts = 0; // used for testing only + struct XML_ParserStruct { /* The first member must be m_userData so that the XML_GetUserData macro works. 
*/ void *m_userData; void *m_handlerArg; - char *m_buffer; + + // How the four parse buffer pointers below relate in time and space: + // + // m_buffer <= m_bufferPtr <= m_bufferEnd <= m_bufferLim + // | | | | + // <--parsed-->| | | + // <---parsing--->| | + // <--unoccupied-->| + // <---------total-malloced/realloced-------->| + + char *m_buffer; // malloc/realloc base pointer of parse buffer const XML_Memory_Handling_Suite m_mem; - /* first character to be parsed */ - const char *m_bufferPtr; - /* past last character to be parsed */ - char *m_bufferEnd; - /* allocated end of m_buffer */ - const char *m_bufferLim; + const char *m_bufferPtr; // first character to be parsed + char *m_bufferEnd; // past last character to be parsed + const char *m_bufferLim; // allocated end of m_buffer + XML_Index m_parseEndByteIndex; const char *m_parseEndPtr; + size_t m_partialTokenBytesBefore; /* used in heuristic to avoid O(n^2) */ + XML_Bool m_reparseDeferralEnabled; + int m_lastBufferRequestSize; XML_Char *m_dataBuf; XML_Char *m_dataBufEnd; XML_StartElementHandler m_startElementHandler; @@ -703,7 +744,7 @@ enum XML_ParamEntityParsing m_paramEntityParsing; #endif unsigned long m_hash_secret_salt; -#ifdef XML_DTD +#if XML_GE == 1 ACCOUNTING m_accounting; ENTITY_STATS m_entity_stats; #endif @@ -948,6 +989,47 @@ return parser->m_hash_secret_salt; } +static enum XML_Error +callProcessor(XML_Parser parser, const char *start, const char *end, + const char **endPtr) { + const size_t have_now = EXPAT_SAFE_PTR_DIFF(end, start); + + if (parser->m_reparseDeferralEnabled + && ! parser->m_parsingStatus.finalBuffer) { + // Heuristic: don't try to parse a partial token again until the amount of + // available data has increased significantly. + const size_t had_before = parser->m_partialTokenBytesBefore; + // ...but *do* try anyway if we're close to causing a reallocation. + size_t available_buffer + = EXPAT_SAFE_PTR_DIFF(parser->m_bufferPtr, parser->m_buffer); +#if XML_CONTEXT_BYTES > 0 + available_buffer -= EXPAT_MIN(available_buffer, XML_CONTEXT_BYTES); +#endif + available_buffer + += EXPAT_SAFE_PTR_DIFF(parser->m_bufferLim, parser->m_bufferEnd); + // m_lastBufferRequestSize is never assigned a value < 0, so the cast is ok + const bool enough + = (have_now >= 2 * had_before) + || ((size_t)parser->m_lastBufferRequestSize > available_buffer); + + if (! enough) { + *endPtr = start; // callers may expect this to be set + return XML_ERROR_NONE; + } + } + g_parseAttempts += 1; + const enum XML_Error ret = parser->m_processor(parser, start, end, endPtr); + if (ret == XML_ERROR_NONE) { + // if we consumed nothing, remember what we had on this parse attempt. 
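The callProcessor() wrapper above is the heart of the new reparse deferral: when the previous attempt ended on a partial token, it skips calling the real processor again until the unconsumed input has at least doubled (or the buffer is about to be reallocated), which turns the worst case of one huge token fed in many tiny chunks from repeated quadratic re-tokenisation into a handful of retries. Callers that prefer a parse attempt after every chunk can opt out per parser with the new XML_SetReparseDeferralEnabled(). A usage sketch, not part of the patch, with a hypothetical feed() helper and error handling kept minimal:

#include <string.h>
#include <expat.h>

static enum XML_Status
feed(XML_Parser p, const char *chunk, size_t n, int is_final)
{
    void *buf = XML_GetBuffer(p, (int)n);
    if (buf == NULL) {
        return XML_STATUS_ERROR;
    }
    memcpy(buf, chunk, n);
    return XML_ParseBuffer(p, (int)n, is_final);
}

int
main(void)
{
    XML_Parser p = XML_ParserCreate(NULL);
    /* Deferral is on by default in 2.6.0; disabling it restores a parse
     * attempt after every chunk, accepting the worst case this patch avoids. */
    XML_SetReparseDeferralEnabled(p, XML_FALSE);
    feed(p, "<doc>", 5, 0);
    feed(p, "</doc>", 6, 1);
    XML_ParserFree(p);
    return 0;
}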
+ if (*endPtr == start) { + parser->m_partialTokenBytesBefore = have_now; + } else { + parser->m_partialTokenBytesBefore = 0; + } + } + return ret; +} + static XML_Bool /* only valid for root parser */ startParsing(XML_Parser parser) { /* hash functions must be initialized before setContext() is called */ @@ -1129,6 +1211,9 @@ parser->m_bufferEnd = parser->m_buffer; parser->m_parseEndByteIndex = 0; parser->m_parseEndPtr = NULL; + parser->m_partialTokenBytesBefore = 0; + parser->m_reparseDeferralEnabled = g_reparseDeferralEnabledDefault; + parser->m_lastBufferRequestSize = 0; parser->m_declElementType = NULL; parser->m_declAttributeId = NULL; parser->m_declEntity = NULL; @@ -1163,7 +1248,7 @@ #endif parser->m_hash_secret_salt = 0; -#ifdef XML_DTD +#if XML_GE == 1 memset(&parser->m_accounting, 0, sizeof(ACCOUNTING)); parser->m_accounting.debugLevel = getDebugLevel("EXPAT_ACCOUNTING_DEBUG", 0u); parser->m_accounting.maximumAmplificationFactor @@ -1298,6 +1383,7 @@ to worry which hash secrets each table has. */ unsigned long oldhash_secret_salt; + XML_Bool oldReparseDeferralEnabled; /* Validate the oldParser parameter before we pull everything out of it */ if (oldParser == NULL) @@ -1342,6 +1428,7 @@ to worry which hash secrets each table has. */ oldhash_secret_salt = parser->m_hash_secret_salt; + oldReparseDeferralEnabled = parser->m_reparseDeferralEnabled; #ifdef XML_DTD if (! context) @@ -1394,6 +1481,7 @@ parser->m_defaultExpandInternalEntities = oldDefaultExpandInternalEntities; parser->m_ns_triplets = oldns_triplets; parser->m_hash_secret_salt = oldhash_secret_salt; + parser->m_reparseDeferralEnabled = oldReparseDeferralEnabled; parser->m_parentParser = oldParser; #ifdef XML_DTD parser->m_paramEntityParsing = oldParamEntityParsing; @@ -1848,55 +1936,8 @@ parser->m_parsingStatus.parsing = XML_PARSING; } - if (len == 0) { - parser->m_parsingStatus.finalBuffer = (XML_Bool)isFinal; - if (! isFinal) - return XML_STATUS_OK; - parser->m_positionPtr = parser->m_bufferPtr; - parser->m_parseEndPtr = parser->m_bufferEnd; - - /* If data are left over from last buffer, and we now know that these - data are the final chunk of input, then we have to check them again - to detect errors based on that fact. - */ - parser->m_errorCode - = parser->m_processor(parser, parser->m_bufferPtr, - parser->m_parseEndPtr, &parser->m_bufferPtr); - - if (parser->m_errorCode == XML_ERROR_NONE) { - switch (parser->m_parsingStatus.parsing) { - case XML_SUSPENDED: - /* It is hard to be certain, but it seems that this case - * cannot occur. This code is cleaning up a previous parse - * with no new data (since len == 0). Changing the parsing - * state requires getting to execute a handler function, and - * there doesn't seem to be an opportunity for that while in - * this circumstance. - * - * Given the uncertainty, we retain the code but exclude it - * from coverage tests. 
- * - * LCOV_EXCL_START - */ - XmlUpdatePosition(parser->m_encoding, parser->m_positionPtr, - parser->m_bufferPtr, &parser->m_position); - parser->m_positionPtr = parser->m_bufferPtr; - return XML_STATUS_SUSPENDED; - /* LCOV_EXCL_STOP */ - case XML_INITIALIZED: - case XML_PARSING: - parser->m_parsingStatus.parsing = XML_FINISHED; - /* fall through */ - default: - return XML_STATUS_OK; - } - } - parser->m_eventEndPtr = parser->m_eventPtr; - parser->m_processor = errorProcessor; - return XML_STATUS_ERROR; - } -#ifndef XML_CONTEXT_BYTES - else if (parser->m_bufferPtr == parser->m_bufferEnd) { +#if XML_CONTEXT_BYTES == 0 + if (parser->m_bufferPtr == parser->m_bufferEnd) { const char *end; int nLeftOver; enum XML_Status result; @@ -1907,12 +1948,15 @@ parser->m_processor = errorProcessor; return XML_STATUS_ERROR; } + // though this isn't a buffer request, we assume that `len` is the app's + // preferred buffer fill size, and therefore save it here. + parser->m_lastBufferRequestSize = len; parser->m_parseEndByteIndex += len; parser->m_positionPtr = s; parser->m_parsingStatus.finalBuffer = (XML_Bool)isFinal; parser->m_errorCode - = parser->m_processor(parser, s, parser->m_parseEndPtr = s + len, &end); + = callProcessor(parser, s, parser->m_parseEndPtr = s + len, &end); if (parser->m_errorCode != XML_ERROR_NONE) { parser->m_eventEndPtr = parser->m_eventPtr; @@ -1939,23 +1983,25 @@ &parser->m_position); nLeftOver = s + len - end; if (nLeftOver) { - if (parser->m_buffer == NULL - || nLeftOver > parser->m_bufferLim - parser->m_buffer) { - /* avoid _signed_ integer overflow */ - char *temp = NULL; - const int bytesToAllocate = (int)((unsigned)len * 2U); - if (bytesToAllocate > 0) { - temp = (char *)REALLOC(parser, parser->m_buffer, bytesToAllocate); - } - if (temp == NULL) { - parser->m_errorCode = XML_ERROR_NO_MEMORY; - parser->m_eventPtr = parser->m_eventEndPtr = NULL; - parser->m_processor = errorProcessor; - return XML_STATUS_ERROR; - } - parser->m_buffer = temp; - parser->m_bufferLim = parser->m_buffer + bytesToAllocate; + // Back up and restore the parsing status to avoid XML_ERROR_SUSPENDED + // (and XML_ERROR_FINISHED) from XML_GetBuffer. + const enum XML_Parsing originalStatus = parser->m_parsingStatus.parsing; + parser->m_parsingStatus.parsing = XML_PARSING; + void *const temp = XML_GetBuffer(parser, nLeftOver); + parser->m_parsingStatus.parsing = originalStatus; + // GetBuffer may have overwritten this, but we want to remember what the + // app requested, not how many bytes were left over after parsing. + parser->m_lastBufferRequestSize = len; + if (temp == NULL) { + // NOTE: parser->m_errorCode has already been set by XML_GetBuffer(). + parser->m_eventPtr = parser->m_eventEndPtr = NULL; + parser->m_processor = errorProcessor; + return XML_STATUS_ERROR; } + // Since we know that the buffer was empty and XML_CONTEXT_BYTES is 0, we + // don't have any data to preserve, and can copy straight into the start + // of the buffer rather than the GetBuffer return pointer (which may be + // pointing further into the allocated buffer). 
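Two related cleanups follow in the XML_GetBuffer() hunks below: the grow/compact path now also runs when m_buffer is still NULL, and the starting size for the doubling loop is measured as m_bufferLim - m_buffer (the full allocation) rather than from m_bufferPtr. The growth rule itself amounts to the sketch below; grow_target() is a hypothetical name, not Expat code, and 1024 stands in for INIT_BUFFER_SIZE.

#include <limits.h>

static int
grow_target(int allocated, int needed)
{
    int size = (allocated == 0) ? 1024 /* INIT_BUFFER_SIZE */ : allocated;
    do {
        if (size > INT_MAX / 2) {
            return -1;          /* doubling would overflow: report no memory */
        }
        size *= 2;
    } while (size < needed);
    return size;
}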
memcpy(parser->m_buffer, end, nLeftOver); } parser->m_bufferPtr = parser->m_buffer; @@ -1966,16 +2012,15 @@ parser->m_eventEndPtr = parser->m_bufferPtr; return result; } -#endif /* not defined XML_CONTEXT_BYTES */ - else { - void *buff = XML_GetBuffer(parser, len); - if (buff == NULL) - return XML_STATUS_ERROR; - else { - memcpy(buff, s, len); - return XML_ParseBuffer(parser, len, isFinal); - } +#endif /* XML_CONTEXT_BYTES == 0 */ + void *buff = XML_GetBuffer(parser, len); + if (buff == NULL) + return XML_STATUS_ERROR; + if (len > 0) { + assert(s != NULL); // make sure s==NULL && len!=0 was rejected above + memcpy(buff, s, len); } + return XML_ParseBuffer(parser, len, isFinal); } enum XML_Status XMLCALL @@ -2015,8 +2060,8 @@ parser->m_parseEndByteIndex += len; parser->m_parsingStatus.finalBuffer = (XML_Bool)isFinal; - parser->m_errorCode = parser->m_processor( - parser, start, parser->m_parseEndPtr, &parser->m_bufferPtr); + parser->m_errorCode = callProcessor(parser, start, parser->m_parseEndPtr, + &parser->m_bufferPtr); if (parser->m_errorCode != XML_ERROR_NONE) { parser->m_eventEndPtr = parser->m_eventPtr; @@ -2061,10 +2106,14 @@ default:; } - if (len > EXPAT_SAFE_PTR_DIFF(parser->m_bufferLim, parser->m_bufferEnd)) { -#ifdef XML_CONTEXT_BYTES + // whether or not the request succeeds, `len` seems to be the app's preferred + // buffer fill size; remember it. + parser->m_lastBufferRequestSize = len; + if (len > EXPAT_SAFE_PTR_DIFF(parser->m_bufferLim, parser->m_bufferEnd) + || parser->m_buffer == NULL) { +#if XML_CONTEXT_BYTES > 0 int keep; -#endif /* defined XML_CONTEXT_BYTES */ +#endif /* XML_CONTEXT_BYTES > 0 */ /* Do not invoke signed arithmetic overflow: */ int neededSize = (int)((unsigned)len + (unsigned)EXPAT_SAFE_PTR_DIFF( @@ -2073,7 +2122,7 @@ parser->m_errorCode = XML_ERROR_NO_MEMORY; return NULL; } -#ifdef XML_CONTEXT_BYTES +#if XML_CONTEXT_BYTES > 0 keep = (int)EXPAT_SAFE_PTR_DIFF(parser->m_bufferPtr, parser->m_buffer); if (keep > XML_CONTEXT_BYTES) keep = XML_CONTEXT_BYTES; @@ -2083,10 +2132,11 @@ return NULL; } neededSize += keep; -#endif /* defined XML_CONTEXT_BYTES */ - if (neededSize - <= EXPAT_SAFE_PTR_DIFF(parser->m_bufferLim, parser->m_buffer)) { -#ifdef XML_CONTEXT_BYTES +#endif /* XML_CONTEXT_BYTES > 0 */ + if (parser->m_buffer && parser->m_bufferPtr + && neededSize + <= EXPAT_SAFE_PTR_DIFF(parser->m_bufferLim, parser->m_buffer)) { +#if XML_CONTEXT_BYTES > 0 if (keep < EXPAT_SAFE_PTR_DIFF(parser->m_bufferPtr, parser->m_buffer)) { int offset = (int)EXPAT_SAFE_PTR_DIFF(parser->m_bufferPtr, parser->m_buffer) @@ -2099,19 +2149,17 @@ parser->m_bufferPtr -= offset; } #else - if (parser->m_buffer && parser->m_bufferPtr) { - memmove(parser->m_buffer, parser->m_bufferPtr, - EXPAT_SAFE_PTR_DIFF(parser->m_bufferEnd, parser->m_bufferPtr)); - parser->m_bufferEnd - = parser->m_buffer - + EXPAT_SAFE_PTR_DIFF(parser->m_bufferEnd, parser->m_bufferPtr); - parser->m_bufferPtr = parser->m_buffer; - } -#endif /* not defined XML_CONTEXT_BYTES */ + memmove(parser->m_buffer, parser->m_bufferPtr, + EXPAT_SAFE_PTR_DIFF(parser->m_bufferEnd, parser->m_bufferPtr)); + parser->m_bufferEnd + = parser->m_buffer + + EXPAT_SAFE_PTR_DIFF(parser->m_bufferEnd, parser->m_bufferPtr); + parser->m_bufferPtr = parser->m_buffer; +#endif /* XML_CONTEXT_BYTES > 0 */ } else { char *newBuf; int bufferSize - = (int)EXPAT_SAFE_PTR_DIFF(parser->m_bufferLim, parser->m_bufferPtr); + = (int)EXPAT_SAFE_PTR_DIFF(parser->m_bufferLim, parser->m_buffer); if (bufferSize == 0) bufferSize = INIT_BUFFER_SIZE; do { @@ -2128,7 
+2176,7 @@ return NULL; } parser->m_bufferLim = newBuf + bufferSize; -#ifdef XML_CONTEXT_BYTES +#if XML_CONTEXT_BYTES > 0 if (parser->m_bufferPtr) { memcpy(newBuf, &parser->m_bufferPtr[-keep], EXPAT_SAFE_PTR_DIFF(parser->m_bufferEnd, parser->m_bufferPtr) @@ -2158,7 +2206,7 @@ parser->m_bufferEnd = newBuf; } parser->m_bufferPtr = parser->m_buffer = newBuf; -#endif /* not defined XML_CONTEXT_BYTES */ +#endif /* XML_CONTEXT_BYTES > 0 */ } parser->m_eventPtr = parser->m_eventEndPtr = NULL; parser->m_positionPtr = NULL; @@ -2208,7 +2256,7 @@ } parser->m_parsingStatus.parsing = XML_PARSING; - parser->m_errorCode = parser->m_processor( + parser->m_errorCode = callProcessor( parser, parser->m_bufferPtr, parser->m_parseEndPtr, &parser->m_bufferPtr); if (parser->m_errorCode != XML_ERROR_NONE) { @@ -2272,7 +2320,7 @@ const char *XMLCALL XML_GetInputContext(XML_Parser parser, int *offset, int *size) { -#ifdef XML_CONTEXT_BYTES +#if XML_CONTEXT_BYTES > 0 if (parser == NULL) return NULL; if (parser->m_eventPtr && parser->m_buffer) { @@ -2286,7 +2334,7 @@ (void)parser; (void)offset; (void)size; -#endif /* defined XML_CONTEXT_BYTES */ +#endif /* XML_CONTEXT_BYTES > 0 */ return (const char *)0; } @@ -2506,7 +2554,7 @@ #ifdef XML_DTD {XML_FEATURE_DTD, XML_L("XML_DTD"), 0}, #endif -#ifdef XML_CONTEXT_BYTES +#if XML_CONTEXT_BYTES > 0 {XML_FEATURE_CONTEXT_BYTES, XML_L("XML_CONTEXT_BYTES"), XML_CONTEXT_BYTES}, #endif @@ -2522,8 +2570,9 @@ #ifdef XML_ATTR_INFO {XML_FEATURE_ATTR_INFO, XML_L("XML_ATTR_INFO"), 0}, #endif -#ifdef XML_DTD - /* Added in Expat 2.4.0. */ +#if XML_GE == 1 + /* Added in Expat 2.4.0 for XML_DTD defined and + * added in Expat 2.6.0 for XML_GE == 1. */ {XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_MAXIMUM_AMPLIFICATION_DEFAULT, XML_L("XML_BLAP_MAX_AMP"), (long int) @@ -2531,13 +2580,15 @@ {XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT, XML_L("XML_BLAP_ACT_THRES"), EXPAT_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT}, + /* Added in Expat 2.6.0. */ + {XML_FEATURE_GE, XML_L("XML_GE"), 0}, #endif {XML_FEATURE_END, NULL, 0}}; return features; } -#ifdef XML_DTD +#if XML_GE == 1 XML_Bool XMLCALL XML_SetBillionLaughsAttackProtectionMaximumAmplification( XML_Parser parser, float maximumAmplificationFactor) { @@ -2559,7 +2610,16 @@ parser->m_accounting.activationThresholdBytes = activationThresholdBytes; return XML_TRUE; } -#endif /* XML_DTD */ +#endif /* XML_GE == 1 */ + +XML_Bool XMLCALL +XML_SetReparseDeferralEnabled(XML_Parser parser, XML_Bool enabled) { + if (parser != NULL && (enabled == XML_TRUE || enabled == XML_FALSE)) { + parser->m_reparseDeferralEnabled = enabled; + return XML_TRUE; + } + return XML_FALSE; +} /* Initially tag->rawName always points into the parse buffer; for those TAG instances opened while the current parse buffer was @@ -2581,7 +2641,7 @@ */ if (tag->rawName == rawNameBuf) break; - /* For re-use purposes we need to ensure that the + /* For reuse purposes we need to ensure that the size of tag->buf is a multiple of sizeof(XML_Char). */ rawNameLen = ROUND_UP(tag->rawNameLength, sizeof(XML_Char)); @@ -2645,13 +2705,13 @@ int tok = XmlContentTok(parser->m_encoding, start, end, &next); switch (tok) { case XML_TOK_BOM: -#ifdef XML_DTD +#if XML_GE == 1 if (! accountingDiffTolerated(parser, tok, start, next, __LINE__, XML_ACCOUNT_DIRECT)) { accountingOnAbort(parser); return XML_ERROR_AMPLIFICATION_LIMIT_BREACH; } -#endif /* XML_DTD */ +#endif /* XML_GE == 1 */ /* If we are at the end of the buffer, this would cause the next stage, i.e. 
externalEntityInitProcessor3, to pass control directly to @@ -2765,7 +2825,7 @@ for (;;) { const char *next = s; /* XmlContentTok doesn't always set the last arg */ int tok = XmlContentTok(enc, s, end, &next); -#ifdef XML_DTD +#if XML_GE == 1 const char *accountAfter = ((tok == XML_TOK_TRAILING_RSQB) || (tok == XML_TOK_TRAILING_CR)) ? (haveMore ? s /* i.e. 0 bytes */ : end) @@ -2831,14 +2891,14 @@ XML_Char ch = (XML_Char)XmlPredefinedEntityName( enc, s + enc->minBytesPerChar, next - enc->minBytesPerChar); if (ch) { -#ifdef XML_DTD +#if XML_GE == 1 /* NOTE: We are replacing 4-6 characters original input for 1 character * so there is no amplification and hence recording without * protection. */ accountingDiffTolerated(parser, tok, (char *)&ch, ((char *)&ch) + sizeof(XML_Char), __LINE__, XML_ACCOUNT_ENTITY_EXPANSION); -#endif /* XML_DTD */ +#endif /* XML_GE == 1 */ if (parser->m_characterDataHandler) parser->m_characterDataHandler(parser->m_handlerArg, &ch, 1); else if (parser->m_defaultHandler) @@ -3039,13 +3099,13 @@ if (parser->m_ns && localPart) { /* localPart and prefix may have been overwritten in tag->name.str, since this points to the binding->uri - buffer which gets re-used; so we have to add them again + buffer which gets reused; so we have to add them again */ uri = (XML_Char *)tag->name.str + tag->name.uriLen; /* don't need to check for space - already done in storeAtts() */ while (*localPart) *uri++ = *localPart++; - prefix = (XML_Char *)tag->name.prefix; + prefix = tag->name.prefix; if (parser->m_ns_triplets && prefix) { *uri++ = parser->m_namespaceSeparator; while (*prefix) @@ -3112,7 +3172,7 @@ However, now we have a start/endCdataSectionHandler, so it seems easier to let the user deal with this. */ - else if (0 && parser->m_characterDataHandler) + else if ((0) && parser->m_characterDataHandler) parser->m_characterDataHandler(parser->m_handlerArg, parser->m_dataBuf, 0); /* END disabled code */ @@ -3141,8 +3201,8 @@ (int)(dataPtr - (ICHAR *)parser->m_dataBuf)); } else parser->m_characterDataHandler( - parser->m_handlerArg, (XML_Char *)s, - (int)((XML_Char *)end - (XML_Char *)s)); + parser->m_handlerArg, (const XML_Char *)s, + (int)((const XML_Char *)end - (const XML_Char *)s)); } else if (parser->m_defaultHandler) reportDefault(parser, enc, s, end); /* We are at the end of the final buffer, should we check for @@ -3175,8 +3235,8 @@ *eventPP = s; } } else - charDataHandler(parser->m_handlerArg, (XML_Char *)s, - (int)((XML_Char *)next - (XML_Char *)s)); + charDataHandler(parser->m_handlerArg, (const XML_Char *)s, + (int)((const XML_Char *)next - (const XML_Char *)s)); } else if (parser->m_defaultHandler) reportDefault(parser, enc, s, next); } break; @@ -4040,7 +4100,7 @@ for (;;) { const char *next = s; /* in case of XML_TOK_NONE or XML_TOK_PARTIAL */ int tok = XmlCdataSectionTok(enc, s, end, &next); -#ifdef XML_DTD +#if XML_GE == 1 if (! 
accountingDiffTolerated(parser, tok, s, next, __LINE__, account)) { accountingOnAbort(parser); return XML_ERROR_AMPLIFICATION_LIMIT_BREACH; @@ -4055,7 +4115,7 @@ parser->m_endCdataSectionHandler(parser->m_handlerArg); /* BEGIN disabled code */ /* see comment under XML_TOK_CDATA_SECT_OPEN */ - else if (0 && parser->m_characterDataHandler) + else if ((0) && parser->m_characterDataHandler) parser->m_characterDataHandler(parser->m_handlerArg, parser->m_dataBuf, 0); /* END disabled code */ @@ -4091,8 +4151,8 @@ *eventPP = s; } } else - charDataHandler(parser->m_handlerArg, (XML_Char *)s, - (int)((XML_Char *)next - (XML_Char *)s)); + charDataHandler(parser->m_handlerArg, (const XML_Char *)s, + (int)((const XML_Char *)next - (const XML_Char *)s)); } else if (parser->m_defaultHandler) reportDefault(parser, enc, s, next); } break; @@ -4192,7 +4252,7 @@ *eventPP = s; *startPtr = NULL; tok = XmlIgnoreSectionTok(enc, s, end, &next); -# ifdef XML_DTD +# if XML_GE == 1 if (! accountingDiffTolerated(parser, tok, s, next, __LINE__, XML_ACCOUNT_DIRECT)) { accountingOnAbort(parser); @@ -4284,7 +4344,7 @@ const XML_Char *storedversion = NULL; int standalone = -1; -#ifdef XML_DTD +#if XML_GE == 1 if (! accountingDiffTolerated(parser, XML_TOK_XML_DECL, s, next, __LINE__, XML_ACCOUNT_DIRECT)) { accountingOnAbort(parser); @@ -4482,16 +4542,16 @@ parser->m_processor = entityValueProcessor; return entityValueProcessor(parser, next, end, nextPtr); } - /* If we are at the end of the buffer, this would cause XmlPrologTok to - return XML_TOK_NONE on the next call, which would then cause the - function to exit with *nextPtr set to s - that is what we want for other - tokens, but not for the BOM - we would rather like to skip it; - then, when this routine is entered the next time, XmlPrologTok will - return XML_TOK_INVALID, since the BOM is still in the buffer + /* XmlPrologTok has now set the encoding based on the BOM it found, and we + must move s and nextPtr forward to consume the BOM. + + If we didn't, and got XML_TOK_NONE from the next XmlPrologTok call, we + would leave the BOM in the buffer and return. On the next call to this + function, our XmlPrologTok call would return XML_TOK_INVALID, since it + is not valid to have multiple BOMs. */ - else if (tok == XML_TOK_BOM && next == end - && ! parser->m_parsingStatus.finalBuffer) { -# ifdef XML_DTD + else if (tok == XML_TOK_BOM) { +# if XML_GE == 1 if (! accountingDiffTolerated(parser, tok, s, next, __LINE__, XML_ACCOUNT_DIRECT)) { accountingOnAbort(parser); @@ -4500,7 +4560,7 @@ # endif *nextPtr = next; - return XML_ERROR_NONE; + s = next; } /* If we get this token, we have the start of what might be a normal tag, but not a declaration (i.e. it doesn't begin with @@ -4707,11 +4767,13 @@ } } role = XmlTokenRole(&parser->m_prologState, tok, s, next, enc); -#ifdef XML_DTD +#if XML_GE == 1 switch (role) { case XML_ROLE_INSTANCE_START: // bytes accounted in contentProcessor case XML_ROLE_XML_DECL: // bytes accounted in processXmlDecl - case XML_ROLE_TEXT_DECL: // bytes accounted in processXmlDecl +# ifdef XML_DTD + case XML_ROLE_TEXT_DECL: // bytes accounted in processXmlDecl +# endif break; default: if (! accountingDiffTolerated(parser, tok, s, next, __LINE__, account)) { @@ -5029,6 +5091,9 @@ break; case XML_ROLE_ENTITY_VALUE: if (dtd->keepProcessing) { +#if XML_GE == 1 + // This will store the given replacement text in + // parser->m_declEntity->textPtr. 
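Throughout doContent(), doCdataSection(), doIgnoreSection() and doProlog() above, the byte-accounting calls move from an XML_DTD guard to XML_GE == 1, since the amplification tracking protects general-entity expansion as a whole rather than DTD processing alone. Embedders can tune that protection with the two calls declared earlier in expat.h; a usage sketch, not part of the patch, with a hypothetical helper name and arbitrary example values:

#include <expat.h>

static XML_Parser
make_hardened_parser(void)
{
    XML_Parser p = XML_ParserCreate(NULL);
    if (p == NULL) {
        return NULL;
    }
#if XML_GE == 1   /* same guard the 2.6.0 header uses for these declarations */
    /* Fail with XML_ERROR_AMPLIFICATION_LIMIT_BREACH once entity expansion
     * exceeds roughly 20x the direct input ... */
    XML_SetBillionLaughsAttackProtectionMaximumAmplification(p, 20.0f);
    /* ... but only start enforcing after 4 MiB of input has been consumed. */
    XML_SetBillionLaughsAttackProtectionActivationThreshold(p, 4 * 1024 * 1024);
#endif
    return p;
}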
enum XML_Error result = storeEntityValue(parser, enc, s + enc->minBytesPerChar, next - enc->minBytesPerChar, XML_ACCOUNT_NONE); @@ -5049,6 +5114,25 @@ poolDiscard(&dtd->entityValuePool); if (result != XML_ERROR_NONE) return result; +#else + // This will store "&entity123;" in parser->m_declEntity->textPtr + // to end up as "&entity123;" in the handler. + if (parser->m_declEntity != NULL) { + const enum XML_Error result + = storeSelfEntityValue(parser, parser->m_declEntity); + if (result != XML_ERROR_NONE) + return result; + + if (parser->m_entityDeclHandler) { + *eventEndPP = s; + parser->m_entityDeclHandler( + parser->m_handlerArg, parser->m_declEntity->name, + parser->m_declEntity->is_param, parser->m_declEntity->textPtr, + parser->m_declEntity->textLen, parser->m_curBase, 0, 0, 0); + handleDefault = XML_FALSE; + } + } +#endif } break; case XML_ROLE_DOCTYPE_SYSTEM_ID: @@ -5107,6 +5191,16 @@ } break; case XML_ROLE_ENTITY_COMPLETE: +#if XML_GE == 0 + // This will store "&entity123;" in entity->textPtr + // to end up as "&entity123;" in the handler. + if (parser->m_declEntity != NULL) { + const enum XML_Error result + = storeSelfEntityValue(parser, parser->m_declEntity); + if (result != XML_ERROR_NONE) + return result; + } +#endif if (dtd->keepProcessing && parser->m_declEntity && parser->m_entityDeclHandler) { *eventEndPP = s; @@ -5648,7 +5742,7 @@ for (;;) { const char *next = NULL; int tok = XmlPrologTok(parser->m_encoding, s, end, &next); -#ifdef XML_DTD +#if XML_GE == 1 if (! accountingDiffTolerated(parser, tok, s, next, __LINE__, XML_ACCOUNT_DIRECT)) { accountingOnAbort(parser); @@ -5728,7 +5822,7 @@ return XML_ERROR_NO_MEMORY; } entity->open = XML_TRUE; -#ifdef XML_DTD +#if XML_GE == 1 entityTrackingOnOpen(parser, entity, __LINE__); #endif entity->processed = 0; @@ -5761,10 +5855,10 @@ if (textEnd != next && parser->m_parsingStatus.parsing == XML_SUSPENDED) { entity->processed = (int)(next - textStart); parser->m_processor = internalEntityProcessor; - } else { -#ifdef XML_DTD + } else if (parser->m_openInternalEntities->entity == entity) { +#if XML_GE == 1 entityTrackingOnClose(parser, entity, __LINE__); -#endif /* XML_DTD */ +#endif /* XML_GE == 1 */ entity->open = XML_FALSE; parser->m_openInternalEntities = openEntity->next; /* put openEntity back in list of free instances */ @@ -5813,7 +5907,7 @@ return result; } -#ifdef XML_DTD +#if XML_GE == 1 entityTrackingOnClose(parser, entity, __LINE__); #endif entity->open = XML_FALSE; @@ -5892,7 +5986,7 @@ const char *next = ptr; /* XmlAttributeValueTok doesn't always set the last arg */ int tok = XmlAttributeValueTok(enc, ptr, end, &next); -#ifdef XML_DTD +#if XML_GE == 1 if (! accountingDiffTolerated(parser, tok, ptr, next, __LINE__, account)) { accountingOnAbort(parser); return XML_ERROR_AMPLIFICATION_LIMIT_BREACH; @@ -5957,14 +6051,14 @@ XML_Char ch = (XML_Char)XmlPredefinedEntityName( enc, ptr + enc->minBytesPerChar, next - enc->minBytesPerChar); if (ch) { -#ifdef XML_DTD +#if XML_GE == 1 /* NOTE: We are replacing 4-6 characters original input for 1 character * so there is no amplification and hence recording without * protection. */ accountingDiffTolerated(parser, tok, (char *)&ch, ((char *)&ch) + sizeof(XML_Char), __LINE__, XML_ACCOUNT_ENTITY_EXPANSION); -#endif /* XML_DTD */ +#endif /* XML_GE == 1 */ if (! 
poolAppendChar(pool, ch)) return XML_ERROR_NO_MEMORY; break; @@ -6042,14 +6136,14 @@ enum XML_Error result; const XML_Char *textEnd = entity->textPtr + entity->textLen; entity->open = XML_TRUE; -#ifdef XML_DTD +#if XML_GE == 1 entityTrackingOnOpen(parser, entity, __LINE__); #endif result = appendAttributeValue(parser, parser->m_internalEncoding, isCdata, (const char *)entity->textPtr, (const char *)textEnd, pool, XML_ACCOUNT_ENTITY_EXPANSION); -#ifdef XML_DTD +#if XML_GE == 1 entityTrackingOnClose(parser, entity, __LINE__); #endif entity->open = XML_FALSE; @@ -6079,6 +6173,7 @@ /* not reached */ } +#if XML_GE == 1 static enum XML_Error storeEntityValue(XML_Parser parser, const ENCODING *enc, const char *entityTextPtr, const char *entityTextEnd, @@ -6086,12 +6181,12 @@ DTD *const dtd = parser->m_dtd; /* save one level of indirection */ STRING_POOL *pool = &(dtd->entityValuePool); enum XML_Error result = XML_ERROR_NONE; -#ifdef XML_DTD +# ifdef XML_DTD int oldInEntityValue = parser->m_prologState.inEntityValue; parser->m_prologState.inEntityValue = 1; -#else +# else UNUSED_P(account); -#endif /* XML_DTD */ +# endif /* XML_DTD */ /* never return Null for the value argument in EntityDeclHandler, since this would indicate an external entity; therefore we have to make sure that entityValuePool.start is not null */ @@ -6105,18 +6200,16 @@ = entityTextPtr; /* XmlEntityValueTok doesn't always set the last arg */ int tok = XmlEntityValueTok(enc, entityTextPtr, entityTextEnd, &next); -#ifdef XML_DTD if (! accountingDiffTolerated(parser, tok, entityTextPtr, next, __LINE__, account)) { accountingOnAbort(parser); result = XML_ERROR_AMPLIFICATION_LIMIT_BREACH; goto endEntityValue; } -#endif switch (tok) { case XML_TOK_PARAM_ENTITY_REF: -#ifdef XML_DTD +# ifdef XML_DTD if (parser->m_isParamEntity || enc != parser->m_encoding) { const XML_Char *name; ENTITY *entity; @@ -6178,7 +6271,7 @@ } break; } -#endif /* XML_DTD */ +# endif /* XML_DTD */ /* In the internal subset, PE references are not legal within markup declarations, e.g entity values in this case. */ parser->m_eventPtr = entityTextPtr; @@ -6259,12 +6352,38 @@ entityTextPtr = next; } endEntityValue: -#ifdef XML_DTD +# ifdef XML_DTD parser->m_prologState.inEntityValue = oldInEntityValue; -#endif /* XML_DTD */ +# endif /* XML_DTD */ return result; } +#else /* XML_GE == 0 */ + +static enum XML_Error +storeSelfEntityValue(XML_Parser parser, ENTITY *entity) { + // This will store "&entity123;" in entity->textPtr + // to end up as "&entity123;" in the handler. + const char *const entity_start = "&"; + const char *const entity_end = ";"; + + STRING_POOL *const pool = &(parser->m_dtd->entityValuePool); + if (! poolAppendString(pool, entity_start) + || ! poolAppendString(pool, entity->name) + || ! 
poolAppendString(pool, entity_end)) { + poolDiscard(pool); + return XML_ERROR_NO_MEMORY; + } + + entity->textPtr = poolStart(pool); + entity->textLen = (int)(poolLength(pool)); + poolFinish(pool); + + return XML_ERROR_NONE; +} + +#endif /* XML_GE == 0 */ + static void FASTCALL normalizeLines(XML_Char *s) { XML_Char *p; @@ -6375,8 +6494,9 @@ } while ((convert_res != XML_CONVERT_COMPLETED) && (convert_res != XML_CONVERT_INPUT_INCOMPLETE)); } else - parser->m_defaultHandler(parser->m_handlerArg, (XML_Char *)s, - (int)((XML_Char *)end - (XML_Char *)s)); + parser->m_defaultHandler( + parser->m_handlerArg, (const XML_Char *)s, + (int)((const XML_Char *)end - (const XML_Char *)s)); } static int @@ -6480,7 +6600,7 @@ name = poolStoreString(&dtd->pool, enc, start, end); if (! name) return NULL; - /* skip quotation mark - its storage will be re-used (like in name[-1]) */ + /* skip quotation mark - its storage will be reused (like in name[-1]) */ ++name; id = (ATTRIBUTE_ID *)lookup(parser, &dtd->attributeIds, name, sizeof(ATTRIBUTE_ID)); @@ -6630,6 +6750,10 @@ static XML_Bool setContext(XML_Parser parser, const XML_Char *context) { + if (context == NULL) { + return XML_FALSE; + } + DTD *const dtd = parser->m_dtd; /* save one level of indirection */ const XML_Char *s = context; @@ -7220,7 +7344,7 @@ return NULL; for (;;) { const enum XML_Convert_Result convert_res = XmlConvert( - enc, &ptr, end, (ICHAR **)&(pool->ptr), (ICHAR *)pool->end); + enc, &ptr, end, (ICHAR **)&(pool->ptr), (const ICHAR *)pool->end); if ((convert_res == XML_CONVERT_COMPLETED) || (convert_res == XML_CONVERT_INPUT_INCOMPLETE)) break; @@ -7651,7 +7775,7 @@ return result; } -#ifdef XML_DTD +#if XML_GE == 1 static float accountingGetCurrentAmplification(XML_Parser rootParser) { @@ -7672,7 +7796,7 @@ const XML_Parser rootParser = getRootParserOf(originParser, NULL); assert(! rootParser->m_parentParser); - if (rootParser->m_accounting.debugLevel < 1) { + if (rootParser->m_accounting.debugLevel == 0u) { return; } @@ -7709,7 +7833,7 @@ /* Note: Performance is of no concern here */ const char *walker = before; - if ((rootParser->m_accounting.debugLevel >= 3) + if ((rootParser->m_accounting.debugLevel >= 3u) || (after - before) <= (ptrdiff_t)(contextLength + ellipsisLength + contextLength)) { for (; walker < after; walker++) { @@ -7774,7 +7898,7 @@ || (amplificationFactor <= rootParser->m_accounting.maximumAmplificationFactor); - if (rootParser->m_accounting.debugLevel >= 2) { + if (rootParser->m_accounting.debugLevel >= 2u) { accountingReportStats(rootParser, ""); accountingReportDiff(rootParser, levelsAwayFromRootParser, before, after, bytesMore, source_line, account); @@ -7801,7 +7925,7 @@ entityTrackingReportStats(XML_Parser rootParser, ENTITY *entity, const char *action, int sourceLine) { assert(! 
rootParser->m_parentParser); - if (rootParser->m_entity_stats.debugLevel < 1) + if (rootParser->m_entity_stats.debugLevel == 0u) return; # if defined(XML_UNICODE) @@ -8382,7 +8506,7 @@ assert(0); /* never gets here */ } -#endif /* XML_DTD */ +#endif /* XML_GE == 1 */ static unsigned long getDebugLevel(const char *variableName, unsigned long defaultDebugLevel) { @@ -8393,9 +8517,9 @@ const char *const value = valueOrNull; errno = 0; - char *afterValue = (char *)value; + char *afterValue = NULL; unsigned long debugLevel = strtoul(value, &afterValue, 10); - if ((errno != 0) || (afterValue[0] != '\0')) { + if ((errno != 0) || (afterValue == value) || (afterValue[0] != '\0')) { errno = 0; return defaultDebugLevel; } diff -Nru python3.11-3.11.8/Modules/expat/xmlrole.c python3.11-3.11.9/Modules/expat/xmlrole.c --- python3.11-3.11.8/Modules/expat/xmlrole.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/xmlrole.c 2024-04-02 08:25:04.000000000 +0000 @@ -12,10 +12,10 @@ Copyright (c) 2002-2006 Karl Waclawek Copyright (c) 2002-2003 Fred L. Drake, Jr. Copyright (c) 2005-2009 Steven Solie - Copyright (c) 2016-2021 Sebastian Pipping + Copyright (c) 2016-2023 Sebastian Pipping Copyright (c) 2017 Rhodri James Copyright (c) 2019 David Loffredo - Copyright (c) 2021 Dong-hee Na + Copyright (c) 2021 Donghee Na Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -38,7 +38,7 @@ USE OR OTHER DEALINGS IN THE SOFTWARE. */ -#include +#include "expat_config.h" #include diff -Nru python3.11-3.11.8/Modules/expat/xmlrole.h python3.11-3.11.9/Modules/expat/xmlrole.h --- python3.11-3.11.8/Modules/expat/xmlrole.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/xmlrole.h 2024-04-02 08:25:04.000000000 +0000 @@ -10,7 +10,7 @@ Copyright (c) 2000 Clark Cooper Copyright (c) 2002 Karl Waclawek Copyright (c) 2002 Fred L. Drake, Jr. - Copyright (c) 2017 Sebastian Pipping + Copyright (c) 2017-2024 Sebastian Pipping Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -127,9 +127,9 @@ #endif /* XML_DTD */ } PROLOG_STATE; -void XmlPrologStateInit(PROLOG_STATE *); +void XmlPrologStateInit(PROLOG_STATE *state); #ifdef XML_DTD -void XmlPrologStateInitExternalEntity(PROLOG_STATE *); +void XmlPrologStateInitExternalEntity(PROLOG_STATE *state); #endif /* XML_DTD */ #define XmlTokenRole(state, tok, ptr, end, enc) \ diff -Nru python3.11-3.11.8/Modules/expat/xmltok.c python3.11-3.11.9/Modules/expat/xmltok.c --- python3.11-3.11.8/Modules/expat/xmltok.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/xmltok.c 2024-04-02 08:25:04.000000000 +0000 @@ -12,7 +12,7 @@ Copyright (c) 2002 Greg Stein Copyright (c) 2002-2016 Karl Waclawek Copyright (c) 2005-2009 Steven Solie - Copyright (c) 2016-2022 Sebastian Pipping + Copyright (c) 2016-2024 Sebastian Pipping Copyright (c) 2016 Pascal Cuoq Copyright (c) 2016 Don Lewis Copyright (c) 2017 Rhodri James @@ -20,8 +20,10 @@ Copyright (c) 2017 Benbuck Nason Copyright (c) 2017 José Gutiérrez de la Concha Copyright (c) 2019 David Loffredo - Copyright (c) 2021 Dong-hee Na + Copyright (c) 2021 Donghee Na Copyright (c) 2022 Martin Ettl + Copyright (c) 2022 Sean McBride + Copyright (c) 2023 Hanno Böck Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -44,7 +46,7 @@ USE OR OTHER DEALINGS IN THE SOFTWARE. 
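The getDebugLevel() hunk above tightens the strtoul() call: the end pointer now starts out NULL and the parse is rejected when no digits were consumed (afterValue == value), in addition to the existing errno and trailing-character checks. A minimal standalone sketch of that validation pattern follows; the function and variable names are illustrative and not taken from the patch.

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

/* Parse an unsigned debug level, falling back to a default when the
 * string is empty, contains no digits, has trailing junk, or overflows.
 * Mirrors the checks used by the getDebugLevel() change above. */
static unsigned long
parse_debug_level(const char *value, unsigned long fallback)
{
    if (value == NULL) {
        return fallback;
    }
    errno = 0;
    char *after = NULL;
    unsigned long level = strtoul(value, &after, 10);
    /* after == value: strtoul consumed no digits (e.g. "" or "abc");
     * *after != '\0': trailing junk such as "3x". */
    if (errno != 0 || after == value || *after != '\0') {
        return fallback;
    }
    return level;
}

int main(void)
{
    printf("%lu\n", parse_debug_level("2", 0));   /* 2 */
    printf("%lu\n", parse_debug_level("", 0));    /* 0: no digits consumed */
    printf("%lu\n", parse_debug_level("3x", 0));  /* 0: trailing junk */
    return 0;
}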
*/ -#include +#include "expat_config.h" #include #include /* memcpy */ @@ -76,7 +78,7 @@ #define VTABLE VTABLE1, PREFIX(toUtf8), PREFIX(toUtf16) #define UCS2_GET_NAMING(pages, hi, lo) \ - (namingBitmap[(pages[hi] << 3) + ((lo) >> 5)] & (1u << ((lo)&0x1F))) + (namingBitmap[(pages[hi] << 3) + ((lo) >> 5)] & (1u << ((lo) & 0x1F))) /* A 2 byte UTF-8 representation splits the characters 11 bits between the bottom 5 and 6 bits of the bytes. We need 8 bits to index into @@ -100,7 +102,7 @@ & (1u << (((byte)[2]) & 0x1F))) /* Detection of invalid UTF-8 sequences is based on Table 3.1B - of Unicode 3.2: http://www.unicode.org/unicode/reports/tr28/ + of Unicode 3.2: https://www.unicode.org/unicode/reports/tr28/ with the additional restriction of not allowing the Unicode code points 0xFFFF and 0xFFFE (sequences EF,BF,BF and EF,BF,BE). Implementation details: @@ -225,7 +227,7 @@ /* isNmstrt2 */ NULL, /* isNmstrt3 */ NULL, /* isNmstrt4 */ NULL, \ /* isInvalid2 */ NULL, /* isInvalid3 */ NULL, /* isInvalid4 */ NULL -static int FASTCALL checkCharRefNumber(int); +static int FASTCALL checkCharRefNumber(int result); #include "xmltok_impl.h" #include "ascii.h" @@ -243,7 +245,7 @@ #endif #define SB_BYTE_TYPE(enc, p) \ - (((struct normal_encoding *)(enc))->type[(unsigned char)*(p)]) + (((const struct normal_encoding *)(enc))->type[(unsigned char)*(p)]) #ifdef XML_MIN_SIZE static int PTRFASTCALL @@ -407,7 +409,7 @@ unsigned short *to = *toP; const char *from = *fromP; while (from < fromLim && to < toLim) { - switch (((struct normal_encoding *)enc)->type[(unsigned char)*from]) { + switch (SB_BYTE_TYPE(enc, from)) { case BT_LEAD2: if (fromLim - from < 2) { res = XML_CONVERT_INPUT_INCOMPLETE; @@ -715,31 +717,26 @@ return res; \ } -#define SET2(ptr, ch) (((ptr)[0] = ((ch)&0xff)), ((ptr)[1] = ((ch) >> 8))) #define GET_LO(ptr) ((unsigned char)(ptr)[0]) #define GET_HI(ptr) ((unsigned char)(ptr)[1]) DEFINE_UTF16_TO_UTF8(little2_) DEFINE_UTF16_TO_UTF16(little2_) -#undef SET2 #undef GET_LO #undef GET_HI -#define SET2(ptr, ch) (((ptr)[0] = ((ch) >> 8)), ((ptr)[1] = ((ch)&0xFF))) #define GET_LO(ptr) ((unsigned char)(ptr)[1]) #define GET_HI(ptr) ((unsigned char)(ptr)[0]) DEFINE_UTF16_TO_UTF8(big2_) DEFINE_UTF16_TO_UTF16(big2_) -#undef SET2 #undef GET_LO #undef GET_HI #define LITTLE2_BYTE_TYPE(enc, p) \ - ((p)[1] == 0 ? ((struct normal_encoding *)(enc))->type[(unsigned char)*(p)] \ - : unicode_byte_type((p)[1], (p)[0])) + ((p)[1] == 0 ? SB_BYTE_TYPE(enc, p) : unicode_byte_type((p)[1], (p)[0])) #define LITTLE2_BYTE_TO_ASCII(p) ((p)[1] == 0 ? (p)[0] : -1) #define LITTLE2_CHAR_MATCHES(p, c) ((p)[1] == 0 && (p)[0] == (c)) #define LITTLE2_IS_NAME_CHAR_MINBPC(p) \ @@ -872,9 +869,7 @@ #endif #define BIG2_BYTE_TYPE(enc, p) \ - ((p)[0] == 0 \ - ? ((struct normal_encoding *)(enc))->type[(unsigned char)(p)[1]] \ - : unicode_byte_type((p)[0], (p)[1])) + ((p)[0] == 0 ? SB_BYTE_TYPE(enc, p + 1) : unicode_byte_type((p)[0], (p)[1])) #define BIG2_BYTE_TO_ASCII(p) ((p)[0] == 0 ? (p)[1] : -1) #define BIG2_CHAR_MATCHES(p, c) ((p)[0] == 0 && (p)[1] == (c)) #define BIG2_IS_NAME_CHAR_MINBPC(p) \ diff -Nru python3.11-3.11.8/Modules/expat/xmltok.h python3.11-3.11.9/Modules/expat/xmltok.h --- python3.11-3.11.8/Modules/expat/xmltok.h 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/xmltok.h 2024-04-02 08:25:04.000000000 +0000 @@ -10,7 +10,7 @@ Copyright (c) 2000 Clark Cooper Copyright (c) 2002 Fred L. Drake, Jr. 
Copyright (c) 2002-2005 Karl Waclawek - Copyright (c) 2016-2017 Sebastian Pipping + Copyright (c) 2016-2024 Sebastian Pipping Copyright (c) 2017 Rhodri James Licensed under the MIT license: @@ -289,7 +289,8 @@ const char **encodingNamePtr, const ENCODING **namedEncodingPtr, int *standalonePtr); -int XmlInitEncoding(INIT_ENCODING *, const ENCODING **, const char *name); +int XmlInitEncoding(INIT_ENCODING *p, const ENCODING **encPtr, + const char *name); const ENCODING *XmlGetUtf8InternalEncoding(void); const ENCODING *XmlGetUtf16InternalEncoding(void); int FASTCALL XmlUtf8Encode(int charNumber, char *buf); @@ -307,7 +308,8 @@ const char **encodingNamePtr, const ENCODING **namedEncodingPtr, int *standalonePtr); -int XmlInitEncodingNS(INIT_ENCODING *, const ENCODING **, const char *name); +int XmlInitEncodingNS(INIT_ENCODING *p, const ENCODING **encPtr, + const char *name); const ENCODING *XmlGetUtf8InternalEncodingNS(void); const ENCODING *XmlGetUtf16InternalEncodingNS(void); ENCODING *XmlInitUnknownEncodingNS(void *mem, int *table, CONVERTER convert, diff -Nru python3.11-3.11.8/Modules/expat/xmltok_impl.c python3.11-3.11.9/Modules/expat/xmltok_impl.c --- python3.11-3.11.8/Modules/expat/xmltok_impl.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/expat/xmltok_impl.c 2024-04-02 08:25:04.000000000 +0000 @@ -126,7 +126,7 @@ # endif # define HAS_CHARS(enc, ptr, end, count) \ - ((end) - (ptr) >= ((count)*MINBPC(enc))) + ((end) - (ptr) >= ((count) * MINBPC(enc))) # define HAS_CHAR(enc, ptr, end) HAS_CHARS(enc, ptr, end, 1) diff -Nru python3.11-3.11.8/Modules/getpath.c python3.11-3.11.9/Modules/getpath.c --- python3.11-3.11.8/Modules/getpath.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/getpath.c 2024-04-02 08:25:04.000000000 +0000 @@ -265,6 +265,10 @@ } /* Convert all parts to wchar and accumulate max final length */ wchar_t **parts = (wchar_t **)PyMem_Malloc(n * sizeof(wchar_t *)); + if (parts == NULL) { + PyErr_NoMemory(); + return NULL; + } memset(parts, 0, n * sizeof(wchar_t *)); Py_ssize_t cchFinal = 0; Py_ssize_t first = 0; diff -Nru python3.11-3.11.8/Modules/overlapped.c python3.11-3.11.9/Modules/overlapped.c --- python3.11-3.11.8/Modules/overlapped.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/overlapped.c 2024-04-02 08:25:04.000000000 +0000 @@ -717,6 +717,24 @@ if (!HasOverlappedIoCompleted(&self->overlapped) && self->type != TYPE_NOT_STARTED) { + // NOTE: We should not get here, if we do then something is wrong in + // the IocpProactor or ProactorEventLoop. Since everything uses IOCP if + // the overlapped IO hasn't completed yet then we should not be + // deallocating! + // + // The problem is likely that this OverlappedObject was removed from + // the IocpProactor._cache before it was complete. The _cache holds a + // reference while IO is pending so that it does not get deallocated + // while the kernel has retained the OVERLAPPED structure. + // + // CancelIoEx (likely called from self.cancel()) may have successfully + // completed, but the OVERLAPPED is still in use until either + // HasOverlappedIoCompleted() is true or GetQueuedCompletionStatus has + // returned this OVERLAPPED object. 
+ // + // NOTE: Waiting when IOCP is in use can hang indefinitely, but this + // CancelIoEx is superfluous in that self.cancel() was already called, + // so I've only ever seen this return FALSE with GLE=ERROR_NOT_FOUND Py_BEGIN_ALLOW_THREADS if (CancelIoEx(self->handle, &self->overlapped)) wait = TRUE; @@ -2074,6 +2092,7 @@ WINAPI_CONSTANT(F_DWORD, ERROR_OPERATION_ABORTED); WINAPI_CONSTANT(F_DWORD, ERROR_SEM_TIMEOUT); WINAPI_CONSTANT(F_DWORD, ERROR_PIPE_BUSY); + WINAPI_CONSTANT(F_DWORD, ERROR_PORT_UNREACHABLE); WINAPI_CONSTANT(F_DWORD, INFINITE); WINAPI_CONSTANT(F_HANDLE, INVALID_HANDLE_VALUE); WINAPI_CONSTANT(F_HANDLE, NULL); diff -Nru python3.11-3.11.8/Modules/posixmodule.c python3.11-3.11.9/Modules/posixmodule.c --- python3.11-3.11.8/Modules/posixmodule.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/posixmodule.c 2024-04-02 08:25:04.000000000 +0000 @@ -8392,36 +8392,39 @@ if (!result) return NULL; + int pos = 0; + #ifndef doubletime #define doubletime(TV) ((double)(TV).tv_sec + (TV).tv_usec * 0.000001) #endif - PyStructSequence_SET_ITEM(result, 0, - PyFloat_FromDouble(doubletime(ru->ru_utime))); - PyStructSequence_SET_ITEM(result, 1, - PyFloat_FromDouble(doubletime(ru->ru_stime))); -#define SET_INT(result, index, value)\ - PyStructSequence_SET_ITEM(result, index, PyLong_FromLong(value)) - SET_INT(result, 2, ru->ru_maxrss); - SET_INT(result, 3, ru->ru_ixrss); - SET_INT(result, 4, ru->ru_idrss); - SET_INT(result, 5, ru->ru_isrss); - SET_INT(result, 6, ru->ru_minflt); - SET_INT(result, 7, ru->ru_majflt); - SET_INT(result, 8, ru->ru_nswap); - SET_INT(result, 9, ru->ru_inblock); - SET_INT(result, 10, ru->ru_oublock); - SET_INT(result, 11, ru->ru_msgsnd); - SET_INT(result, 12, ru->ru_msgrcv); - SET_INT(result, 13, ru->ru_nsignals); - SET_INT(result, 14, ru->ru_nvcsw); - SET_INT(result, 15, ru->ru_nivcsw); -#undef SET_INT - - if (PyErr_Occurred()) { - Py_DECREF(result); - return NULL; - } +#define SET_RESULT(CALL) \ + do { \ + PyObject *item = (CALL); \ + if (item == NULL) { \ + Py_DECREF(result); \ + return NULL; \ + } \ + PyStructSequence_SET_ITEM(result, pos++, item); \ + } while(0) + + SET_RESULT(PyFloat_FromDouble(doubletime(ru->ru_utime))); + SET_RESULT(PyFloat_FromDouble(doubletime(ru->ru_stime))); + SET_RESULT(PyLong_FromLong(ru->ru_maxrss)); + SET_RESULT(PyLong_FromLong(ru->ru_ixrss)); + SET_RESULT(PyLong_FromLong(ru->ru_idrss)); + SET_RESULT(PyLong_FromLong(ru->ru_isrss)); + SET_RESULT(PyLong_FromLong(ru->ru_minflt)); + SET_RESULT(PyLong_FromLong(ru->ru_majflt)); + SET_RESULT(PyLong_FromLong(ru->ru_nswap)); + SET_RESULT(PyLong_FromLong(ru->ru_inblock)); + SET_RESULT(PyLong_FromLong(ru->ru_oublock)); + SET_RESULT(PyLong_FromLong(ru->ru_msgsnd)); + SET_RESULT(PyLong_FromLong(ru->ru_msgrcv)); + SET_RESULT(PyLong_FromLong(ru->ru_nsignals)); + SET_RESULT(PyLong_FromLong(ru->ru_nvcsw)); + SET_RESULT(PyLong_FromLong(ru->ru_nivcsw)); +#undef SET_RESULT return Py_BuildValue("NiN", PyLong_FromPid(pid), status, result); } @@ -8544,15 +8547,25 @@ if (!result) return NULL; - PyStructSequence_SET_ITEM(result, 0, PyLong_FromPid(si.si_pid)); - PyStructSequence_SET_ITEM(result, 1, _PyLong_FromUid(si.si_uid)); - PyStructSequence_SET_ITEM(result, 2, PyLong_FromLong((long)(si.si_signo))); - PyStructSequence_SET_ITEM(result, 3, PyLong_FromLong((long)(si.si_status))); - PyStructSequence_SET_ITEM(result, 4, PyLong_FromLong((long)(si.si_code))); - if (PyErr_Occurred()) { - Py_DECREF(result); - return NULL; - } + int pos = 0; + +#define SET_RESULT(CALL) \ + do { \ + PyObject *item = 
(CALL); \ + if (item == NULL) { \ + Py_DECREF(result); \ + return NULL; \ + } \ + PyStructSequence_SET_ITEM(result, pos++, item); \ + } while(0) + + SET_RESULT(PyLong_FromPid(si.si_pid)); + SET_RESULT(_PyLong_FromUid(si.si_uid)); + SET_RESULT(PyLong_FromLong((long)(si.si_signo))); + SET_RESULT(PyLong_FromLong((long)(si.si_status))); + SET_RESULT(PyLong_FromLong((long)(si.si_code))); + +#undef SET_RESULT return result; } @@ -11450,46 +11463,50 @@ if (v == NULL) return NULL; + int pos = 0; + +#define SET_RESULT(CALL) \ + do { \ + PyObject *item = (CALL); \ + if (item == NULL) { \ + Py_DECREF(v); \ + return NULL; \ + } \ + PyStructSequence_SET_ITEM(v, pos++, item); \ + } while(0) + #if !defined(HAVE_LARGEFILE_SUPPORT) - PyStructSequence_SET_ITEM(v, 0, PyLong_FromLong((long) st.f_bsize)); - PyStructSequence_SET_ITEM(v, 1, PyLong_FromLong((long) st.f_frsize)); - PyStructSequence_SET_ITEM(v, 2, PyLong_FromLong((long) st.f_blocks)); - PyStructSequence_SET_ITEM(v, 3, PyLong_FromLong((long) st.f_bfree)); - PyStructSequence_SET_ITEM(v, 4, PyLong_FromLong((long) st.f_bavail)); - PyStructSequence_SET_ITEM(v, 5, PyLong_FromLong((long) st.f_files)); - PyStructSequence_SET_ITEM(v, 6, PyLong_FromLong((long) st.f_ffree)); - PyStructSequence_SET_ITEM(v, 7, PyLong_FromLong((long) st.f_favail)); - PyStructSequence_SET_ITEM(v, 8, PyLong_FromLong((long) st.f_flag)); - PyStructSequence_SET_ITEM(v, 9, PyLong_FromLong((long) st.f_namemax)); + SET_RESULT(PyLong_FromLong((long) st.f_bsize)); + SET_RESULT(PyLong_FromLong((long) st.f_frsize)); + SET_RESULT(PyLong_FromLong((long) st.f_blocks)); + SET_RESULT(PyLong_FromLong((long) st.f_bfree)); + SET_RESULT(PyLong_FromLong((long) st.f_bavail)); + SET_RESULT(PyLong_FromLong((long) st.f_files)); + SET_RESULT(PyLong_FromLong((long) st.f_ffree)); + SET_RESULT(PyLong_FromLong((long) st.f_favail)); + SET_RESULT(PyLong_FromLong((long) st.f_flag)); + SET_RESULT(PyLong_FromLong((long) st.f_namemax)); #else - PyStructSequence_SET_ITEM(v, 0, PyLong_FromLong((long) st.f_bsize)); - PyStructSequence_SET_ITEM(v, 1, PyLong_FromLong((long) st.f_frsize)); - PyStructSequence_SET_ITEM(v, 2, - PyLong_FromLongLong((long long) st.f_blocks)); - PyStructSequence_SET_ITEM(v, 3, - PyLong_FromLongLong((long long) st.f_bfree)); - PyStructSequence_SET_ITEM(v, 4, - PyLong_FromLongLong((long long) st.f_bavail)); - PyStructSequence_SET_ITEM(v, 5, - PyLong_FromLongLong((long long) st.f_files)); - PyStructSequence_SET_ITEM(v, 6, - PyLong_FromLongLong((long long) st.f_ffree)); - PyStructSequence_SET_ITEM(v, 7, - PyLong_FromLongLong((long long) st.f_favail)); - PyStructSequence_SET_ITEM(v, 8, PyLong_FromLong((long) st.f_flag)); - PyStructSequence_SET_ITEM(v, 9, PyLong_FromLong((long) st.f_namemax)); + SET_RESULT(PyLong_FromLong((long) st.f_bsize)); + SET_RESULT(PyLong_FromLong((long) st.f_frsize)); + SET_RESULT(PyLong_FromLongLong((long long) st.f_blocks)); + SET_RESULT(PyLong_FromLongLong((long long) st.f_bfree)); + SET_RESULT(PyLong_FromLongLong((long long) st.f_bavail)); + SET_RESULT(PyLong_FromLongLong((long long) st.f_files)); + SET_RESULT(PyLong_FromLongLong((long long) st.f_ffree)); + SET_RESULT(PyLong_FromLongLong((long long) st.f_favail)); + SET_RESULT(PyLong_FromLong((long) st.f_flag)); + SET_RESULT(PyLong_FromLong((long) st.f_namemax)); #endif /* The _ALL_SOURCE feature test macro defines f_fsid as a structure * (issue #32390). 
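The SET_RESULT macros introduced throughout posixmodule.c here (and repeated later in pwdmodule.c, floatobject.c and sysmodule.c) replace the old fill-every-slot-then-check-PyErr_Occurred() style with a per-item check that releases the partially built result as soon as one constructor returns NULL. A sketch of the same build-or-unwind idea, shown outside the CPython API with plain C allocations; all names here are illustrative and not taken from the patch.

#define _POSIX_C_SOURCE 200809L
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Build an array of heap-allocated copies of n strings. On the first
 * allocation failure, free everything built so far and return NULL,
 * rather than filling every slot and checking for errors afterwards. */
static char **
copy_all_or_nothing(const char *const *src, size_t n)
{
    char **out = calloc(n, sizeof(*out));
    if (out == NULL) {
        return NULL;
    }
    size_t pos = 0;

#define SET_RESULT(CALL)              \
    do {                              \
        char *item = (CALL);          \
        if (item == NULL) {           \
            while (pos > 0) {         \
                free(out[--pos]);     \
            }                         \
            free(out);                \
            return NULL;              \
        }                             \
        out[pos++] = item;            \
    } while (0)

    for (size_t i = 0; i < n; i++) {
        SET_RESULT(strdup(src[i]));
    }
#undef SET_RESULT

    return out;
}

int main(void)
{
    const char *const fields[] = {"utime", "stime", "maxrss"};
    char **copy = copy_all_or_nothing(fields, 3);
    if (copy == NULL) {
        return 1;
    }
    for (size_t i = 0; i < 3; i++) {
        printf("%s\n", copy[i]);
        free(copy[i]);
    }
    free(copy);
    return 0;
}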
*/ #if defined(_AIX) && defined(_ALL_SOURCE) - PyStructSequence_SET_ITEM(v, 10, PyLong_FromUnsignedLong(st.f_fsid.val[0])); + SET_RESULT(PyLong_FromUnsignedLong(st.f_fsid.val[0])); #else - PyStructSequence_SET_ITEM(v, 10, PyLong_FromUnsignedLong(st.f_fsid)); + SET_RESULT(PyLong_FromUnsignedLong(st.f_fsid)); #endif - if (PyErr_Occurred()) { - Py_DECREF(v); - return NULL; - } + +#undef SET_RESULT return v; } @@ -13434,12 +13451,23 @@ termsize = PyStructSequence_New((PyTypeObject *)TerminalSizeType); if (termsize == NULL) return NULL; - PyStructSequence_SET_ITEM(termsize, 0, PyLong_FromLong(columns)); - PyStructSequence_SET_ITEM(termsize, 1, PyLong_FromLong(lines)); - if (PyErr_Occurred()) { - Py_DECREF(termsize); - return NULL; - } + + int pos = 0; + +#define SET_TERMSIZE(CALL) \ + do { \ + PyObject *item = (CALL); \ + if (item == NULL) { \ + Py_DECREF(termsize); \ + return NULL; \ + } \ + PyStructSequence_SET_ITEM(termsize, pos++, item); \ + } while(0) + + SET_TERMSIZE(PyLong_FromLong(columns)); + SET_TERMSIZE(PyLong_FromLong(lines)); +#undef SET_TERMSIZE + return termsize; } #endif /* defined(TERMSIZE_USE_CONIO) || defined(TERMSIZE_USE_IOCTL) */ diff -Nru python3.11-3.11.8/Modules/pwdmodule.c python3.11-3.11.9/Modules/pwdmodule.c --- python3.11-3.11.8/Modules/pwdmodule.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/pwdmodule.c 2024-04-02 08:25:04.000000000 +0000 @@ -63,53 +63,52 @@ #define DEFAULT_BUFFER_SIZE 1024 -static void -sets(PyObject *v, int i, const char* val) -{ - if (val) { - PyObject *o = PyUnicode_DecodeFSDefault(val); - PyStructSequence_SET_ITEM(v, i, o); - } - else { - PyStructSequence_SET_ITEM(v, i, Py_None); - Py_INCREF(Py_None); - } -} - static PyObject * mkpwent(PyObject *module, struct passwd *p) { - int setIndex = 0; PyObject *v = PyStructSequence_New(get_pwd_state(module)->StructPwdType); - if (v == NULL) + if (v == NULL) { return NULL; + } + + int setIndex = 0; + +#define SET_STRING(VAL) \ + SET_RESULT((VAL) ? 
PyUnicode_DecodeFSDefault((VAL)) : Py_NewRef(Py_None)) -#define SETS(i,val) sets(v, i, val) +#define SET_RESULT(CALL) \ + do { \ + PyObject *item = (CALL); \ + if (item == NULL) { \ + goto error; \ + } \ + PyStructSequence_SET_ITEM(v, setIndex++, item); \ + } while(0) - SETS(setIndex++, p->pw_name); + SET_STRING(p->pw_name); #if defined(HAVE_STRUCT_PASSWD_PW_PASSWD) && !defined(__ANDROID__) - SETS(setIndex++, p->pw_passwd); + SET_STRING(p->pw_passwd); #else - SETS(setIndex++, ""); + SET_STRING(""); #endif - PyStructSequence_SET_ITEM(v, setIndex++, _PyLong_FromUid(p->pw_uid)); - PyStructSequence_SET_ITEM(v, setIndex++, _PyLong_FromGid(p->pw_gid)); + SET_RESULT(_PyLong_FromUid(p->pw_uid)); + SET_RESULT(_PyLong_FromGid(p->pw_gid)); #if defined(HAVE_STRUCT_PASSWD_PW_GECOS) - SETS(setIndex++, p->pw_gecos); + SET_STRING(p->pw_gecos); #else - SETS(setIndex++, ""); + SET_STRING(""); #endif - SETS(setIndex++, p->pw_dir); - SETS(setIndex++, p->pw_shell); - -#undef SETS + SET_STRING(p->pw_dir); + SET_STRING(p->pw_shell); - if (PyErr_Occurred()) { - Py_XDECREF(v); - return NULL; - } +#undef SET_STRING +#undef SET_RESULT return v; + +error: + Py_DECREF(v); + return NULL; } /*[clinic input] diff -Nru python3.11-3.11.8/Modules/pyexpat.c python3.11-3.11.9/Modules/pyexpat.c --- python3.11-3.11.8/Modules/pyexpat.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Modules/pyexpat.c 2024-04-02 08:25:04.000000000 +0000 @@ -1,6 +1,7 @@ #include "Python.h" #include +#include #include "structmember.h" // PyMemberDef #include "expat.h" @@ -76,6 +77,12 @@ /* NULL if not enabled */ int buffer_size; /* Size of buffer, in XML_Char units */ int buffer_used; /* Buffer units in use */ + bool reparse_deferral_enabled; /* Whether to defer reparsing of + unfinished XML tokens; a de-facto cache of + what Expat has the authority on, for lack + of a getter API function + "XML_GetReparseDeferralEnabled" in Expat + 2.6.0 */ PyObject *intern; /* Dictionary to intern strings */ PyObject **handlers; } xmlparseobject; @@ -706,6 +713,40 @@ #define MAX_CHUNK_SIZE (1 << 20) /*[clinic input] +pyexpat.xmlparser.SetReparseDeferralEnabled + + enabled: bool + / + +Enable/Disable reparse deferral; enabled by default with Expat >=2.6.0. +[clinic start generated code]*/ + +static PyObject * +pyexpat_xmlparser_SetReparseDeferralEnabled_impl(xmlparseobject *self, + int enabled) +/*[clinic end generated code: output=5ec539e3b63c8c49 input=021eb9e0bafc32c5]*/ +{ +#if XML_COMBINED_VERSION >= 20600 + XML_SetReparseDeferralEnabled(self->itself, enabled ? XML_TRUE : XML_FALSE); + self->reparse_deferral_enabled = (bool)enabled; +#endif + Py_RETURN_NONE; +} + +/*[clinic input] +pyexpat.xmlparser.GetReparseDeferralEnabled + +Retrieve reparse deferral enabled status; always returns false with Expat <2.6.0. 
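The new pyexpat SetReparseDeferralEnabled/GetReparseDeferralEnabled methods wrap Expat 2.6.0's XML_SetReparseDeferralEnabled(), with the getter backed by a cached flag since Expat itself offers no query function. A sketch of driving that underlying Expat API directly from C; it assumes an Expat 2.6.0 or newer header and is illustrative rather than taken from the patch.

#include <stdio.h>
#include <string.h>
#include <expat.h>

static void XMLCALL
on_start(void *userData, const XML_Char *name, const XML_Char **atts)
{
    (void)userData;
    (void)atts;
    printf("start element: %s\n", name);
}

int main(void)
{
    XML_Parser parser = XML_ParserCreate(NULL);
    if (parser == NULL) {
        fprintf(stderr, "XML_ParserCreate failed\n");
        return 1;
    }
    XML_SetStartElementHandler(parser, on_start);

#if XML_MAJOR_VERSION > 2 || (XML_MAJOR_VERSION == 2 && XML_MINOR_VERSION >= 6)
    /* With deferral disabled, Expat reports tokens as soon as they are
     * complete instead of holding back small incremental feeds (the
     * pre-2.6.0 behaviour); leaving it enabled, the default, limits
     * quadratic reparsing of unfinished tokens. */
    XML_SetReparseDeferralEnabled(parser, XML_FALSE);
#endif

    const char *chunks[] = {"<root><child", " attr='1'/></root>"};
    for (size_t i = 0; i < 2; i++) {
        int is_final = (i == 1);
        if (XML_Parse(parser, chunks[i], (int)strlen(chunks[i]), is_final)
                == XML_STATUS_ERROR) {
            fprintf(stderr, "parse error: %s\n",
                    XML_ErrorString(XML_GetErrorCode(parser)));
            XML_ParserFree(parser);
            return 1;
        }
    }
    XML_ParserFree(parser);
    return 0;
}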
+[clinic start generated code]*/ + +static PyObject * +pyexpat_xmlparser_GetReparseDeferralEnabled_impl(xmlparseobject *self) +/*[clinic end generated code: output=4e91312e88a595a8 input=54b5f11d32b20f3e]*/ +{ + return PyBool_FromLong(self->reparse_deferral_enabled); +} + +/*[clinic input] pyexpat.xmlparser.Parse cls: defining_class @@ -1067,6 +1108,8 @@ #if XML_COMBINED_VERSION >= 19505 PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #endif + PYEXPAT_XMLPARSER_SETREPARSEDEFERRALENABLED_METHODDEF + PYEXPAT_XMLPARSER_GETREPARSEDEFERRALENABLED_METHODDEF {NULL, NULL} /* sentinel */ }; @@ -1150,6 +1193,11 @@ self->handlers = NULL; self->intern = intern; Py_XINCREF(self->intern); +#if XML_COMBINED_VERSION >= 20600 + self->reparse_deferral_enabled = true; +#else + self->reparse_deferral_enabled = false; +#endif /* namespace_separator is either NULL or contains one char + \0 */ self->itself = XML_ParserCreate_MM(encoding, &ExpatMemoryHandler, @@ -2005,6 +2053,11 @@ #else capi.SetHashSalt = NULL; #endif +#if XML_COMBINED_VERSION >= 20600 + capi.SetReparseDeferralEnabled = XML_SetReparseDeferralEnabled; +#else + capi.SetReparseDeferralEnabled = NULL; +#endif /* export using capsule */ PyObject *capi_object = PyCapsule_New(&capi, PyExpat_CAPSULE_NAME, NULL); diff -Nru python3.11-3.11.8/Objects/descrobject.c python3.11-3.11.9/Objects/descrobject.c --- python3.11-3.11.8/Objects/descrobject.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Objects/descrobject.c 2024-04-02 08:25:04.000000000 +0000 @@ -1700,15 +1700,12 @@ return NULL; if (get == NULL || get == Py_None) { - Py_XDECREF(get); get = pold->prop_get ? pold->prop_get : Py_None; } if (set == NULL || set == Py_None) { - Py_XDECREF(set); set = pold->prop_set ? pold->prop_set : Py_None; } if (del == NULL || del == Py_None) { - Py_XDECREF(del); del = pold->prop_del ? pold->prop_del : Py_None; } if (pold->getter_doc && get != Py_None) { diff -Nru python3.11-3.11.8/Objects/floatobject.c python3.11-3.11.9/Objects/floatobject.c --- python3.11-3.11.8/Objects/floatobject.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Objects/floatobject.c 2024-04-02 08:25:04.000000000 +0000 @@ -101,10 +101,18 @@ return NULL; } -#define SetIntFlag(flag) \ - PyStructSequence_SET_ITEM(floatinfo, pos++, PyLong_FromLong(flag)) -#define SetDblFlag(flag) \ - PyStructSequence_SET_ITEM(floatinfo, pos++, PyFloat_FromDouble(flag)) +#define SetFlag(CALL) \ + do { \ + PyObject *flag = (CALL); \ + if (flag == NULL) { \ + Py_CLEAR(floatinfo); \ + return NULL; \ + } \ + PyStructSequence_SET_ITEM(floatinfo, pos++, flag); \ + } while (0) + +#define SetIntFlag(FLAG) SetFlag(PyLong_FromLong((FLAG))) +#define SetDblFlag(FLAG) SetFlag(PyFloat_FromDouble((FLAG))) SetDblFlag(DBL_MAX); SetIntFlag(DBL_MAX_EXP); @@ -119,11 +127,8 @@ SetIntFlag(FLT_ROUNDS); #undef SetIntFlag #undef SetDblFlag +#undef SetFlag - if (PyErr_Occurred()) { - Py_CLEAR(floatinfo); - return NULL; - } return floatinfo; } diff -Nru python3.11-3.11.8/Objects/listobject.c python3.11-3.11.9/Objects/listobject.c --- python3.11-3.11.8/Objects/listobject.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Objects/listobject.c 2024-04-02 08:25:04.000000000 +0000 @@ -3451,6 +3451,7 @@ listiter_reduce_general(void *_it, int forward) { PyObject *list; + PyObject *iter; /* _PyEval_GetBuiltin can invoke arbitrary code, * call must be before access of iterator pointers. @@ -3458,7 +3459,7 @@ /* the objects are not the same, index is of different types! 
*/ if (forward) { - PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter)); + iter = _PyEval_GetBuiltin(&_Py_ID(iter)); if (!iter) { return NULL; } @@ -3466,21 +3467,19 @@ if (it->it_seq) { return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index); } - Py_DECREF(iter); } else { - PyObject *reversed = _PyEval_GetBuiltin(&_Py_ID(reversed)); - if (!reversed) { + iter = _PyEval_GetBuiltin(&_Py_ID(reversed)); + if (!iter) { return NULL; } listreviterobject *it = (listreviterobject *)_it; if (it->it_seq) { - return Py_BuildValue("N(O)n", reversed, it->it_seq, it->it_index); + return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index); } - Py_DECREF(reversed); } /* empty iterator, create an empty list */ list = PyList_New(0); if (list == NULL) return NULL; - return Py_BuildValue("N(N)", _PyEval_GetBuiltin(&_Py_ID(iter)), list); + return Py_BuildValue("N(N)", iter, list); } diff -Nru python3.11-3.11.8/Objects/longobject.c python3.11-3.11.9/Objects/longobject.c --- python3.11-3.11.8/Objects/longobject.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Objects/longobject.c 2024-04-02 08:25:04.000000000 +0000 @@ -1717,7 +1717,9 @@ digit *pout, *pin, rem, tenpow; int negative; int d; - enum PyUnicode_Kind kind; + + // writer or bytes_writer can be used, but not both at the same time. + assert(writer == NULL || bytes_writer == NULL); a = (PyLongObject *)aa; if (a == NULL || !PyLong_Check(a)) { @@ -1819,7 +1821,6 @@ Py_DECREF(scratch); return -1; } - kind = writer->kind; } else if (bytes_writer) { *bytes_str = _PyBytesWriter_Prepare(bytes_writer, *bytes_str, strlen); @@ -1834,7 +1835,6 @@ Py_DECREF(scratch); return -1; } - kind = PyUnicode_KIND(str); } #define WRITE_DIGITS(p) \ @@ -1882,19 +1882,23 @@ WRITE_DIGITS(p); assert(p == *bytes_str); } - else if (kind == PyUnicode_1BYTE_KIND) { - Py_UCS1 *p; - WRITE_UNICODE_DIGITS(Py_UCS1); - } - else if (kind == PyUnicode_2BYTE_KIND) { - Py_UCS2 *p; - WRITE_UNICODE_DIGITS(Py_UCS2); - } else { - Py_UCS4 *p; - assert (kind == PyUnicode_4BYTE_KIND); - WRITE_UNICODE_DIGITS(Py_UCS4); + enum PyUnicode_Kind kind = writer ? writer->kind : PyUnicode_KIND(str); + if (kind == PyUnicode_1BYTE_KIND) { + Py_UCS1 *p; + WRITE_UNICODE_DIGITS(Py_UCS1); + } + else if (kind == PyUnicode_2BYTE_KIND) { + Py_UCS2 *p; + WRITE_UNICODE_DIGITS(Py_UCS2); + } + else { + assert (kind == PyUnicode_4BYTE_KIND); + Py_UCS4 *p; + WRITE_UNICODE_DIGITS(Py_UCS4); + } } + #undef WRITE_DIGITS #undef WRITE_UNICODE_DIGITS @@ -1935,11 +1939,12 @@ PyObject *v = NULL; Py_ssize_t sz; Py_ssize_t size_a; - enum PyUnicode_Kind kind; int negative; int bits; assert(base == 2 || base == 8 || base == 16); + // writer or bytes_writer can be used, but not both at the same time. 
+ assert(writer == NULL || bytes_writer == NULL); if (a == NULL || !PyLong_Check(a)) { PyErr_BadInternalCall(); return -1; @@ -1987,7 +1992,6 @@ if (writer) { if (_PyUnicodeWriter_Prepare(writer, sz, 'x') == -1) return -1; - kind = writer->kind; } else if (bytes_writer) { *bytes_str = _PyBytesWriter_Prepare(bytes_writer, *bytes_str, sz); @@ -1998,7 +2002,6 @@ v = PyUnicode_New(sz, 'x'); if (v == NULL) return -1; - kind = PyUnicode_KIND(v); } #define WRITE_DIGITS(p) \ @@ -2059,19 +2062,23 @@ WRITE_DIGITS(p); assert(p == *bytes_str); } - else if (kind == PyUnicode_1BYTE_KIND) { - Py_UCS1 *p; - WRITE_UNICODE_DIGITS(Py_UCS1); - } - else if (kind == PyUnicode_2BYTE_KIND) { - Py_UCS2 *p; - WRITE_UNICODE_DIGITS(Py_UCS2); - } else { - Py_UCS4 *p; - assert (kind == PyUnicode_4BYTE_KIND); - WRITE_UNICODE_DIGITS(Py_UCS4); + enum PyUnicode_Kind kind = writer ? writer->kind : PyUnicode_KIND(v); + if (kind == PyUnicode_1BYTE_KIND) { + Py_UCS1 *p; + WRITE_UNICODE_DIGITS(Py_UCS1); + } + else if (kind == PyUnicode_2BYTE_KIND) { + Py_UCS2 *p; + WRITE_UNICODE_DIGITS(Py_UCS2); + } + else { + assert (kind == PyUnicode_4BYTE_KIND); + Py_UCS4 *p; + WRITE_UNICODE_DIGITS(Py_UCS4); + } } + #undef WRITE_DIGITS #undef WRITE_UNICODE_DIGITS diff -Nru python3.11-3.11.8/Objects/typeobject.c python3.11-3.11.9/Objects/typeobject.c --- python3.11-3.11.8/Objects/typeobject.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Objects/typeobject.c 2024-04-02 08:25:04.000000000 +0000 @@ -5327,6 +5327,7 @@ } else { /* args == NULL */ + Py_DECREF(copyreg); Py_DECREF(kwargs); PyErr_BadInternalCall(); return NULL; diff -Nru python3.11-3.11.8/PC/launcher2.c python3.11-3.11.9/PC/launcher2.c --- python3.11-3.11.8/PC/launcher2.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/PC/launcher2.c 2024-04-02 08:25:04.000000000 +0000 @@ -1923,6 +1923,7 @@ struct AppxSearchInfo APPX_SEARCH[] = { // Releases made through the Store + { L"PythonSoftwareFoundation.Python.3.13_qbz5n2kfra8p0", L"3.13", 10 }, { L"PythonSoftwareFoundation.Python.3.12_qbz5n2kfra8p0", L"3.12", 10 }, { L"PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0", L"3.11", 10 }, { L"PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0", L"3.10", 10 }, @@ -1932,6 +1933,7 @@ // Side-loadable releases. 
Note that the publisher ID changes whenever we // renew our code-signing certificate, so the newer ID has a higher // priority (lower sortKey) + { L"PythonSoftwareFoundation.Python.3.13_3847v3x7pw1km", L"3.13", 11 }, { L"PythonSoftwareFoundation.Python.3.12_3847v3x7pw1km", L"3.12", 11 }, { L"PythonSoftwareFoundation.Python.3.11_3847v3x7pw1km", L"3.11", 11 }, { L"PythonSoftwareFoundation.Python.3.11_hd69rhyc2wevp", L"3.11", 12 }, @@ -2012,7 +2014,8 @@ struct StoreSearchInfo STORE_SEARCH[] = { - { L"3", /* 3.11 */ L"9NRWMJP3717K" }, + { L"3", /* 3.12 */ L"9NCVDN91XZQP" }, + { L"3.13", L"9PNRBTZXMB4Z" }, { L"3.12", L"9NCVDN91XZQP" }, { L"3.11", L"9NRWMJP3717K" }, { L"3.10", L"9PJPW5LDXLZ5" }, diff -Nru python3.11-3.11.8/PC/layout/support/appxmanifest.py python3.11-3.11.9/PC/layout/support/appxmanifest.py --- python3.11-3.11.8/PC/layout/support/appxmanifest.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/PC/layout/support/appxmanifest.py 2024-04-02 08:25:04.000000000 +0000 @@ -210,7 +210,7 @@ result = ctypes.create_unicode_buffer(256) result_len = ctypes.c_uint32(256) r = ctypes.windll.kernel32.PackageFamilyNameFromId( - pid, ctypes.byref(result_len), result + ctypes.byref(pid), ctypes.byref(result_len), result ) if r: raise OSError(r, "failed to get package family name") diff -Nru python3.11-3.11.8/PCbuild/get_externals.bat python3.11-3.11.9/PCbuild/get_externals.bat --- python3.11-3.11.8/PCbuild/get_externals.bat 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/PCbuild/get_externals.bat 2024-04-02 08:25:04.000000000 +0000 @@ -54,7 +54,7 @@ set libraries=%libraries% bzip2-1.0.8 if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.4.4 if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.13 -set libraries=%libraries% sqlite-3.43.1.0 +set libraries=%libraries% sqlite-3.45.1.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.12.1 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.12.1 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tix-8.4.3.6 diff -Nru python3.11-3.11.8/PCbuild/pyproject.props python3.11-3.11.9/PCbuild/pyproject.props --- python3.11-3.11.8/PCbuild/pyproject.props 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/PCbuild/pyproject.props 2024-04-02 08:25:04.000000000 +0000 @@ -233,7 +233,7 @@ - + diff -Nru python3.11-3.11.8/PCbuild/python.props python3.11-3.11.9/PCbuild/python.props --- python3.11-3.11.8/PCbuild/python.props 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/PCbuild/python.props 2024-04-02 08:25:04.000000000 +0000 @@ -68,7 +68,7 @@ - $(ExternalsDir)sqlite-3.43.1.0\ + $(ExternalsDir)sqlite-3.45.1.0\ $(ExternalsDir)bzip2-1.0.8\ $(ExternalsDir)xz-5.2.5\ $(ExternalsDir)libffi-3.4.4\ diff -Nru python3.11-3.11.8/PCbuild/readme.txt python3.11-3.11.9/PCbuild/readme.txt --- python3.11-3.11.8/PCbuild/readme.txt 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/PCbuild/readme.txt 2024-04-02 08:25:04.000000000 +0000 @@ -187,7 +187,7 @@ again when building. 
_sqlite3 - Wraps SQLite 3.43.1, which is itself built by sqlite3.vcxproj + Wraps SQLite 3.45.1, which is itself built by sqlite3.vcxproj Homepage: https://www.sqlite.org/ _tkinter diff -Nru python3.11-3.11.8/PCbuild/regen.targets python3.11-3.11.9/PCbuild/regen.targets --- python3.11-3.11.8/PCbuild/regen.targets 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/PCbuild/regen.targets 2024-04-02 08:25:04.000000000 +0000 @@ -109,9 +109,7 @@ Condition="($(Platform) == 'Win32' or $(Platform) == 'x64') and $(Configuration) != 'PGInstrument' and $(Configuration) != 'PGUpdate'"> - diff -Nru python3.11-3.11.8/Parser/parser.c python3.11-3.11.9/Parser/parser.c --- python3.11-3.11.8/Parser/parser.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Parser/parser.c 2024-04-02 08:25:04.000000000 +0000 @@ -6527,7 +6527,7 @@ UNUSED(_end_lineno); // Only used by EXTRA macro int _end_col_offset = _token->end_col_offset; UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( stmt_ty , 9 , "Parenthesized context managers are" , _PyAST_With ( a , b , NULL , EXTRA ) ); + _res = _PyAST_With ( a , b , NULL , EXTRA ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; p->level--; diff -Nru python3.11-3.11.8/Parser/pegen_errors.c python3.11-3.11.9/Parser/pegen_errors.c --- python3.11-3.11.8/Parser/pegen_errors.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Parser/pegen_errors.c 2024-04-02 08:25:04.000000000 +0000 @@ -377,20 +377,18 @@ Py_ssize_t col_number = col_offset; Py_ssize_t end_col_number = end_col_offset; - if (p->tok->encoding != NULL) { - col_number = _PyPegen_byte_offset_to_character_offset(error_line, col_offset); - if (col_number < 0) { + col_number = _PyPegen_byte_offset_to_character_offset(error_line, col_offset); + if (col_number < 0) { + goto error; + } + + if (end_col_offset > 0) { + end_col_number = _PyPegen_byte_offset_to_character_offset(error_line, end_col_offset); + if (end_col_number < 0) { goto error; } - if (end_col_number > 0) { - Py_ssize_t end_col_offset = _PyPegen_byte_offset_to_character_offset(error_line, end_col_number); - if (end_col_offset < 0) { - goto error; - } else { - end_col_number = end_col_offset; - } - } } + tmp = Py_BuildValue("(OnnNnn)", p->tok->filename, lineno, col_number, error_line, end_lineno, end_col_number); if (!tmp) { goto error; diff -Nru python3.11-3.11.8/Python/compile.c python3.11-3.11.9/Python/compile.c --- python3.11-3.11.8/Python/compile.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Python/compile.c 2024-04-02 08:25:04.000000000 +0000 @@ -4050,6 +4050,7 @@ ADDOP_I(c, PRECALL, 0); ADDOP_I(c, CALL, 0); } + SET_LOC(c, s->v.Assert.test); ADDOP_I(c, RAISE_VARARGS, 1); compiler_use_next_block(c, end); return 1; diff -Nru python3.11-3.11.8/Python/getargs.c python3.11-3.11.9/Python/getargs.c --- python3.11-3.11.8/Python/getargs.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Python/getargs.c 2024-04-02 08:25:04.000000000 +0000 @@ -672,7 +672,7 @@ switch (c) { case 'b': { /* unsigned byte -- very short int */ - char *p = va_arg(*p_va, char *); + unsigned char *p = va_arg(*p_va, unsigned char *); long ival = PyLong_AsLong(arg); if (ival == -1 && PyErr_Occurred()) RETURN_ERR_OCCURRED; @@ -693,7 +693,7 @@ case 'B': {/* byte sized bitfield - both signed and unsigned values allowed */ - char *p = va_arg(*p_va, char *); + unsigned char *p = va_arg(*p_va, unsigned char *); unsigned long ival = PyLong_AsUnsignedLongMask(arg); if (ival == (unsigned long)-1 && PyErr_Occurred()) 
RETURN_ERR_OCCURRED; diff -Nru python3.11-3.11.8/Python/initconfig.c python3.11-3.11.9/Python/initconfig.c --- python3.11-3.11.8/Python/initconfig.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Python/initconfig.c 2024-04-02 08:25:04.000000000 +0000 @@ -33,8 +33,8 @@ /* Lines sorted by option name; keep in sync with usage_envvars* below */ static const char usage_help[] = "\ Options (and corresponding environment variables):\n\ --b : issue warnings about str(bytes_instance), str(bytearray_instance)\n\ - and comparing bytes/bytearray with str. (-bb: issue errors)\n\ +-b : issue warnings about converting bytes/bytearray to str and comparing\n\ + bytes/bytearray with str or bytes with int. (-bb: issue errors)\n\ -B : don't write .pyc files on import; also PYTHONDONTWRITEBYTECODE=x\n\ -c cmd : program passed in as string (terminates option list)\n\ -d : turn on parser debugging output (for experts only, only works on\n\ @@ -49,9 +49,10 @@ .pyc extension; also PYTHONOPTIMIZE=x\n\ -OO : do -O changes and also discard docstrings; add .opt-2 before\n\ .pyc extension\n\ --P : don't prepend a potentially unsafe path to sys.path; also PYTHONSAFEPATH\n\ +-P : don't prepend a potentially unsafe path to sys.path; also\n\ + PYTHONSAFEPATH\n\ -q : don't print version and copyright messages on interactive startup\n\ --s : don't add user site directory to sys.path; also PYTHONNOUSERSITE\n\ +-s : don't add user site directory to sys.path; also PYTHONNOUSERSITE=x\n\ -S : don't imply 'import site' on initialization\n\ -u : force the stdout and stderr streams to be unbuffered;\n\ this option has no effect on stdin; also PYTHONUNBUFFERED=x\n\ @@ -65,9 +66,10 @@ -X opt : set implementation-specific option\n\ --check-hash-based-pycs always|default|never:\n\ control how Python invalidates hash-based .pyc files\n\ ---help-env : print help about Python environment variables and exit\n\ ---help-xoptions : print help about implementation-specific -X options and exit\n\ ---help-all : print complete help information and exit\n\ +--help-env: print help about Python environment variables and exit\n\ +--help-xoptions: print help about implementation-specific -X options and exit\n\ +--help-all: print complete help information and exit\n\ +\n\ Arguments:\n\ file : program read from script file\n\ - : program read from stdin (default; interactive mode if a tty)\n\ @@ -76,105 +78,83 @@ static const char usage_xoptions[] = "\ The following implementation-specific options are available:\n\ -\n\ --X faulthandler: enable faulthandler\n\ -\n\ +-X dev : enable Python Development Mode; also PYTHONDEVMODE\n\ +-X faulthandler: dump the Python traceback on fatal errors;\n\ + also PYTHONFAULTHANDLER\n\ +-X frozen_modules=[on|off]: whether to use frozen modules; the default is \"on\"\n\ + for installed Python and \"off\" for a local build\n\ +-X importtime: show how long each import takes; also PYTHONPROFILEIMPORTTIME\n\ +-X int_max_str_digits=N: limit the size of int<->str conversions;\n\ + 0 disables the limit; also PYTHONINTMAXSTRDIGITS\n\ +-X no_debug_ranges: don't include extra location information in code objects;\n\ + also PYTHONNODEBUGRANGES\n\ +-X pycache_prefix=PATH: write .pyc files to a parallel tree instead of to the\n\ + code tree; also PYTHONPYCACHEPREFIX\n\ +" +#ifdef Py_STATS +"-X pystats: enable pystats collection at startup; also PYTHONSTATS\n" +#endif +"\ -X showrefcount: output the total reference count and number of used\n\ - memory blocks when the program finishes or after each statement in the\n\ - 
interactive interpreter. This only works on debug builds\n\ -\n\ --X tracemalloc: start tracing Python memory allocations using the\n\ - tracemalloc module. By default, only the most recent frame is stored in a\n\ - traceback of a trace. Use -X tracemalloc=NFRAME to start tracing with a\n\ - traceback limit of NFRAME frames\n\ -\n\ --X importtime: show how long each import takes. It shows module name,\n\ - cumulative time (including nested imports) and self time (excluding\n\ - nested imports). Note that its output may be broken in multi-threaded\n\ - application. Typical usage is python3 -X importtime -c 'import asyncio'\n\ -\n\ --X dev: enable CPython's \"development mode\", introducing additional runtime\n\ - checks which are too expensive to be enabled by default. Effect of the\n\ - developer mode:\n\ - * Add default warning filter, as -W default\n\ - * Install debug hooks on memory allocators: see the PyMem_SetupDebugHooks()\n\ - C function\n\ - * Enable the faulthandler module to dump the Python traceback on a crash\n\ - * Enable asyncio debug mode\n\ - * Set the dev_mode attribute of sys.flags to True\n\ - * io.IOBase destructor logs close() exceptions\n\ -\n\ --X utf8: enable UTF-8 mode for operating system interfaces, overriding the default\n\ - locale-aware mode. -X utf8=0 explicitly disables UTF-8 mode (even when it would\n\ - otherwise activate automatically)\n\ -\n\ --X pycache_prefix=PATH: enable writing .pyc files to a parallel tree rooted at the\n\ - given directory instead of to the code tree\n\ -\n\ --X warn_default_encoding: enable opt-in EncodingWarning for 'encoding=None'\n\ -\n\ --X no_debug_ranges: disable the inclusion of the tables mapping extra location \n\ - information (end line, start column offset and end column offset) to every \n\ - instruction in code objects. This is useful when smaller code objects and pyc \n\ - files are desired as well as suppressing the extra visual location indicators \n\ - when the interpreter displays tracebacks.\n\ -\n\ --X frozen_modules=[on|off]: whether or not frozen modules should be used.\n\ - The default is \"on\" (or \"off\" if you are running a local build).\n\ -\n\ --X int_max_str_digits=number: limit the size of int<->str conversions.\n\ - This helps avoid denial of service attacks when parsing untrusted data.\n\ - The default is sys.int_info.default_max_str_digits. 0 disables."; + memory blocks when the program finishes or after each statement in\n\ + the interactive interpreter; only works on debug builds\n\ +-X tracemalloc[=N]: trace Python memory allocations; N sets a traceback limit\n\ + of N frames (default: 1); also PYTHONTRACEMALLOC=N\n\ +-X utf8[=0|1]: enable (1) or disable (0) UTF-8 mode; also PYTHONUTF8\n\ +-X warn_default_encoding: enable opt-in EncodingWarning for 'encoding=None';\n\ + also PYTHONWARNDEFAULTENCODING\ +"; /* Envvars that don't have equivalent command-line options are listed first */ static const char usage_envvars[] = "Environment variables that change behavior:\n" -"PYTHONSTARTUP: file executed on interactive startup (no default)\n" -"PYTHONPATH : '%lc'-separated list of directories prefixed to the\n" -" default module search path. 
The result is sys.path.\n" -"PYTHONHOME : alternate directory (or %lc).\n" -" The default module search path uses %s.\n" -"PYTHONPLATLIBDIR : override sys.platlibdir.\n" -"PYTHONCASEOK : ignore case in 'import' statements (Windows).\n" -"PYTHONUTF8: if set to 1, enable the UTF-8 mode.\n" -"PYTHONIOENCODING: Encoding[:errors] used for stdin/stdout/stderr.\n" -"PYTHONFAULTHANDLER: dump the Python traceback on fatal errors.\n" -"PYTHONHASHSEED: if this variable is set to 'random', a random value is used\n" -" to seed the hashes of str and bytes objects. It can also be set to an\n" -" integer in the range [0,4294967295] to get hash values with a\n" -" predictable seed.\n" -"PYTHONINTMAXSTRDIGITS: limits the maximum digit characters in an int value\n" -" when converting from a string and when converting an int back to a str.\n" -" A value of 0 disables the limit. Conversions to or from bases 2, 4, 8,\n" -" 16, and 32 are never limited.\n" -"PYTHONMALLOC: set the Python memory allocators and/or install debug hooks\n" -" on Python memory allocators. Use PYTHONMALLOC=debug to install debug\n" -" hooks.\n" +"PYTHONSTARTUP : file executed on interactive startup (no default)\n" +"PYTHONPATH : '%lc'-separated list of directories prefixed to the\n" +" default module search path. The result is sys.path.\n" +"PYTHONHOME : alternate directory (or %lc).\n" +" The default module search path uses %s.\n" +"PYTHONPLATLIBDIR: override sys.platlibdir\n" +"PYTHONCASEOK : ignore case in 'import' statements (Windows)\n" +"PYTHONIOENCODING: encoding[:errors] used for stdin/stdout/stderr\n" +"PYTHONHASHSEED : if this variable is set to 'random', a random value is used\n" +" to seed the hashes of str and bytes objects. It can also be\n" +" set to an integer in the range [0,4294967295] to get hash\n" +" values with a predictable seed.\n" +"PYTHONMALLOC : set the Python memory allocators and/or install debug hooks\n" +" on Python memory allocators. Use PYTHONMALLOC=debug to\n" +" install debug hooks.\n" "PYTHONCOERCECLOCALE: if this variable is set to 0, it disables the locale\n" -" coercion behavior. Use PYTHONCOERCECLOCALE=warn to request display of\n" -" locale coercion and locale compatibility warnings on stderr.\n" +" coercion behavior. Use PYTHONCOERCECLOCALE=warn to request\n" +" display of locale coercion and locale compatibility warnings\n" +" on stderr.\n" "PYTHONBREAKPOINT: if this variable is set to 0, it disables the default\n" -" debugger. It can be set to the callable of your debugger of choice.\n" -"PYTHONDEVMODE: enable the development mode.\n" -"PYTHONPYCACHEPREFIX: root directory for bytecode cache (pyc) files.\n" -"PYTHONWARNDEFAULTENCODING: enable opt-in EncodingWarning for 'encoding=None'.\n" -"PYTHONNODEBUGRANGES: If this variable is set, it disables the inclusion of the \n" -" tables mapping extra location information (end line, start column offset \n" -" and end column offset) to every instruction in code objects. 
This is useful \n" -" when smaller code objects and pyc files are desired as well as suppressing the \n" -" extra visual location indicators when the interpreter displays tracebacks.\n" -"These variables have equivalent command-line parameters (see --help for details):\n" -"PYTHONDEBUG : enable parser debug mode (-d)\n" -"PYTHONDONTWRITEBYTECODE : don't write .pyc files (-B)\n" -"PYTHONINSPECT : inspect interactively after running script (-i)\n" -"PYTHONINTMAXSTRDIGITS : limit max digit characters in an int value\n" -" (-X int_max_str_digits=number)\n" -"PYTHONNOUSERSITE : disable user site directory (-s)\n" -"PYTHONOPTIMIZE : enable level 1 optimizations (-O)\n" -"PYTHONSAFEPATH : don't prepend a potentially unsafe path to sys.path (-P)\n" -"PYTHONUNBUFFERED : disable stdout/stderr buffering (-u)\n" -"PYTHONVERBOSE : trace import statements (-v)\n" -"PYTHONWARNINGS=arg : warning control (-W arg)\n"; +" debugger. It can be set to the callable of your debugger of\n" +" choice.\n" +"\n" +"These variables have equivalent command-line options (see --help for details):\n" +"PYTHONDEBUG : enable parser debug mode (-d)\n" +"PYTHONDEVMODE : enable Python Development Mode (-X dev)\n" +"PYTHONDONTWRITEBYTECODE: don't write .pyc files (-B)\n" +"PYTHONFAULTHANDLER: dump the Python traceback on fatal errors (-X faulthandler)\n" +"PYTHONINSPECT : inspect interactively after running script (-i)\n" +"PYTHONINTMAXSTRDIGITS: limit the size of int<->str conversions;\n" +" 0 disables the limit (-X int_max_str_digits=N)\n" +"PYTHONNODEBUGRANGES: don't include extra location information in code objects\n" +" (-X no_debug_ranges)\n" +"PYTHONNOUSERSITE: disable user site directory (-s)\n" +"PYTHONOPTIMIZE : enable level 1 optimizations (-O)\n" +"PYTHONPROFILEIMPORTTIME: show how long each import takes (-X importtime)\n" +"PYTHONPYCACHEPREFIX: root directory for bytecode cache (pyc) files\n" +" (-X pycache_prefix)\n" +"PYTHONSAFEPATH : don't prepend a potentially unsafe path to sys.path.\n" +"PYTHONTRACEMALLOC: trace Python memory allocations (-X tracemalloc)\n" +"PYTHONUNBUFFERED: disable stdout/stderr buffering (-u)\n" +"PYTHONUTF8 : control the UTF-8 mode (-X utf8)\n" +"PYTHONVERBOSE : trace import statements (-v)\n" +"PYTHONWARNDEFAULTENCODING: enable opt-in EncodingWarning for 'encoding=None'\n" +" (-X warn_default_encoding)\n" +"PYTHONWARNINGS : warning control (-W)\n" +; #if defined(MS_WINDOWS) # define PYTHONHOMEHELP "\\python{major}{minor}" @@ -2295,9 +2275,9 @@ config_complete_usage(const wchar_t* program) { config_usage(0, program); - puts("\n"); + putchar('\n'); config_envvars_usage(); - puts("\n"); + putchar('\n'); config_xoptions_usage(); } diff -Nru python3.11-3.11.8/Python/structmember.c python3.11-3.11.9/Python/structmember.c --- python3.11-3.11.8/Python/structmember.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Python/structmember.c 2024-04-02 08:25:04.000000000 +0000 @@ -3,6 +3,8 @@ #include "Python.h" #include "structmember.h" // PyMemberDef +#include "pycore_abstract.h" // _PyNumber_Index() + PyObject * PyMember_GetOne(const char *obj_addr, PyMemberDef *l) @@ -190,27 +192,22 @@ case T_UINT: { /* XXX: For compatibility, accept negative int values as well. 
*/ - int overflow; - long long_val = PyLong_AsLongAndOverflow(v, &overflow); - if (long_val == -1 && PyErr_Occurred()) { - return -1; - } - if (overflow < 0) { - PyErr_SetString(PyExc_OverflowError, - "Python int too large to convert to C long"); + v = _PyNumber_Index(v); + if (v == NULL) { return -1; } - else if (!overflow) { - *(unsigned int *)addr = (unsigned int)(unsigned long)long_val; - if (long_val < 0) { - WARN("Writing negative value into unsigned field"); - } - else if ((unsigned long)long_val > UINT_MAX) { - WARN("Truncation of value to unsigned short"); + if (Py_SIZE(v) < 0) { + long long_val = PyLong_AsLong(v); + Py_DECREF(v); + if (long_val == -1 && PyErr_Occurred()) { + return -1; } + *(unsigned int *)addr = (unsigned int)(unsigned long)long_val; + WARN("Writing negative value into unsigned field"); } else { unsigned long ulong_val = PyLong_AsUnsignedLong(v); + Py_DECREF(v); if (ulong_val == (unsigned long)-1 && PyErr_Occurred()) { return -1; } @@ -230,24 +227,22 @@ case T_ULONG: { /* XXX: For compatibility, accept negative int values as well. */ - int overflow; - long long_val = PyLong_AsLongAndOverflow(v, &overflow); - if (long_val == -1 && PyErr_Occurred()) { - return -1; - } - if (overflow < 0) { - PyErr_SetString(PyExc_OverflowError, - "Python int too large to convert to C long"); + v = _PyNumber_Index(v); + if (v == NULL) { return -1; } - else if (!overflow) { - *(unsigned long *)addr = (unsigned long)long_val; - if (long_val < 0) { - WARN("Writing negative value into unsigned field"); + if (Py_SIZE(v) < 0) { + long long_val = PyLong_AsLong(v); + Py_DECREF(v); + if (long_val == -1 && PyErr_Occurred()) { + return -1; } + *(unsigned long *)addr = (unsigned long)long_val; + WARN("Writing negative value into unsigned field"); } else { unsigned long ulong_val = PyLong_AsUnsignedLong(v); + Py_DECREF(v); if (ulong_val == (unsigned long)-1 && PyErr_Occurred()) { return -1; } @@ -304,18 +299,30 @@ return -1; break; } - case T_ULONGLONG:{ - unsigned long long value; - /* ??? PyLong_AsLongLong accepts an int, but PyLong_AsUnsignedLongLong - doesn't ??? 
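The structmember.c rewrite of the unsigned cases first normalizes the value with _PyNumber_Index() and then branches on the sign, because PyLong_AsUnsignedLong() and PyLong_AsUnsignedLongLong() raise OverflowError for negative input, while the old compatibility behaviour (store the negative value, emit a RuntimeWarning) is preserved via the signed conversion path. A small embedded-interpreter sketch of that asymmetry; it is illustrative only and must be linked against libpython (for example via python3-config --embed).

#include <Python.h>
#include <stdio.h>

int main(void)
{
    Py_Initialize();

    PyObject *neg = PyLong_FromLong(-5);
    if (neg == NULL) {
        Py_FinalizeEx();
        return 1;
    }

    /* The unsigned converter refuses negative values outright. */
    unsigned long as_unsigned = PyLong_AsUnsignedLong(neg);
    if (as_unsigned == (unsigned long)-1 && PyErr_Occurred()) {
        PyErr_Print();  /* OverflowError */
    }

    /* So negative input goes through the signed converter and is then
     * cast, which is the compatibility path the hunk keeps (plus a
     * RuntimeWarning inside CPython itself). */
    long as_signed = PyLong_AsLong(neg);
    printf("signed %ld stored into an unsigned field as %lu\n",
           as_signed, (unsigned long)as_signed);

    Py_DECREF(neg);
    Py_FinalizeEx();
    return 0;
}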
*/ - if (PyLong_Check(v)) - *(unsigned long long*)addr = value = PyLong_AsUnsignedLongLong(v); - else - *(unsigned long long*)addr = value = PyLong_AsLong(v); - if ((value == (unsigned long long)-1) && PyErr_Occurred()) + case T_ULONGLONG: { + v = _PyNumber_Index(v); + if (v == NULL) { return -1; - break; } + if (Py_SIZE(v) < 0) { + long long_val = PyLong_AsLong(v); + Py_DECREF(v); + if (long_val == -1 && PyErr_Occurred()) { + return -1; + } + *(unsigned long long *)addr = (unsigned long long)(long long)long_val; + WARN("Writing negative value into unsigned field"); + } + else { + unsigned long long ulonglong_val = PyLong_AsUnsignedLongLong(v); + Py_DECREF(v); + if (ulonglong_val == (unsigned long long)-1 && PyErr_Occurred()) { + return -1; + } + *(unsigned long long*)addr = ulonglong_val; + } + break; + } default: PyErr_Format(PyExc_SystemError, "bad memberdescr type for %s", l->name); diff -Nru python3.11-3.11.8/Python/symtable.c python3.11-3.11.9/Python/symtable.c --- python3.11-3.11.8/Python/symtable.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Python/symtable.c 2024-04-02 08:25:04.000000000 +0000 @@ -1042,16 +1042,22 @@ } static long -symtable_lookup(struct symtable *st, PyObject *name) +symtable_lookup_entry(struct symtable *st, PySTEntryObject *ste, PyObject *name) { PyObject *mangled = _Py_Mangle(st->st_private, name); if (!mangled) return 0; - long ret = _PyST_GetSymbol(st->st_cur, mangled); + long ret = _PyST_GetSymbol(ste, mangled); Py_DECREF(mangled); return ret; } +static long +symtable_lookup(struct symtable *st, PyObject *name) +{ + return symtable_lookup_entry(st, st->st_cur, name); +} + static int symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _symtable_entry *ste, int lineno, int col_offset, int end_lineno, int end_col_offset) @@ -1525,7 +1531,7 @@ * binding conflict with iteration variables, otherwise skip it */ if (ste->ste_comprehension) { - long target_in_scope = _PyST_GetSymbol(ste, target_name); + long target_in_scope = symtable_lookup_entry(st, ste, target_name); if (target_in_scope & DEF_COMP_ITER) { PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_CONFLICT, target_name); PyErr_RangedSyntaxLocationObject(st->st_filename, @@ -1540,7 +1546,7 @@ /* If we find a FunctionBlock entry, add as GLOBAL/LOCAL or NONLOCAL/LOCAL */ if (ste->ste_type == FunctionBlock) { - long target_in_scope = _PyST_GetSymbol(ste, target_name); + long target_in_scope = symtable_lookup_entry(st, ste, target_name); if (target_in_scope & DEF_GLOBAL) { if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) VISIT_QUIT(st, 0); diff -Nru python3.11-3.11.8/Python/sysmodule.c python3.11-3.11.9/Python/sysmodule.c --- python3.11-3.11.8/Python/sysmodule.c 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Python/sysmodule.c 2024-04-02 08:25:04.000000000 +0000 @@ -1363,31 +1363,33 @@ int field = 0; PyHash_FuncDef *hashfunc; hash_info = PyStructSequence_New(&Hash_InfoType); - if (hash_info == NULL) - return NULL; - hashfunc = PyHash_GetFuncDef(); - PyStructSequence_SET_ITEM(hash_info, field++, - PyLong_FromLong(8*sizeof(Py_hash_t))); - PyStructSequence_SET_ITEM(hash_info, field++, - PyLong_FromSsize_t(_PyHASH_MODULUS)); - PyStructSequence_SET_ITEM(hash_info, field++, - PyLong_FromLong(_PyHASH_INF)); - PyStructSequence_SET_ITEM(hash_info, field++, - PyLong_FromLong(0)); // This is no longer used - PyStructSequence_SET_ITEM(hash_info, field++, - PyLong_FromLong(_PyHASH_IMAG)); - PyStructSequence_SET_ITEM(hash_info, field++, - 
PyUnicode_FromString(hashfunc->name)); - PyStructSequence_SET_ITEM(hash_info, field++, - PyLong_FromLong(hashfunc->hash_bits)); - PyStructSequence_SET_ITEM(hash_info, field++, - PyLong_FromLong(hashfunc->seed_bits)); - PyStructSequence_SET_ITEM(hash_info, field++, - PyLong_FromLong(Py_HASH_CUTOFF)); - if (_PyErr_Occurred(tstate)) { - Py_CLEAR(hash_info); + if (hash_info == NULL) { return NULL; } + hashfunc = PyHash_GetFuncDef(); + +#define SET_HASH_INFO_ITEM(CALL) \ + do { \ + PyObject *item = (CALL); \ + if (item == NULL) { \ + Py_CLEAR(hash_info); \ + return NULL; \ + } \ + PyStructSequence_SET_ITEM(hash_info, field++, item); \ + } while(0) + + SET_HASH_INFO_ITEM(PyLong_FromLong(8 * sizeof(Py_hash_t))); + SET_HASH_INFO_ITEM(PyLong_FromSsize_t(_PyHASH_MODULUS)); + SET_HASH_INFO_ITEM(PyLong_FromLong(_PyHASH_INF)); + SET_HASH_INFO_ITEM(PyLong_FromLong(0)); // This is no longer used + SET_HASH_INFO_ITEM(PyLong_FromLong(_PyHASH_IMAG)); + SET_HASH_INFO_ITEM(PyUnicode_FromString(hashfunc->name)); + SET_HASH_INFO_ITEM(PyLong_FromLong(hashfunc->hash_bits)); + SET_HASH_INFO_ITEM(PyLong_FromLong(hashfunc->seed_bits)); + SET_HASH_INFO_ITEM(PyLong_FromLong(Py_HASH_CUTOFF)); + +#undef SET_HASH_INFO_ITEM + return hash_info; } /*[clinic input] @@ -1477,15 +1479,24 @@ if (version == NULL) return NULL; - PyStructSequence_SET_ITEM(version, pos++, PyLong_FromLong(ver.dwMajorVersion)); - PyStructSequence_SET_ITEM(version, pos++, PyLong_FromLong(ver.dwMinorVersion)); - PyStructSequence_SET_ITEM(version, pos++, PyLong_FromLong(ver.dwBuildNumber)); - PyStructSequence_SET_ITEM(version, pos++, PyLong_FromLong(ver.dwPlatformId)); - PyStructSequence_SET_ITEM(version, pos++, PyUnicode_FromWideChar(ver.szCSDVersion, -1)); - PyStructSequence_SET_ITEM(version, pos++, PyLong_FromLong(ver.wServicePackMajor)); - PyStructSequence_SET_ITEM(version, pos++, PyLong_FromLong(ver.wServicePackMinor)); - PyStructSequence_SET_ITEM(version, pos++, PyLong_FromLong(ver.wSuiteMask)); - PyStructSequence_SET_ITEM(version, pos++, PyLong_FromLong(ver.wProductType)); +#define SET_VERSION_INFO(CALL) \ + do { \ + PyObject *item = (CALL); \ + if (item == NULL) { \ + goto error; \ + } \ + PyStructSequence_SET_ITEM(version, pos++, item); \ + } while(0) + + SET_VERSION_INFO(PyLong_FromLong(ver.dwMajorVersion)); + SET_VERSION_INFO(PyLong_FromLong(ver.dwMinorVersion)); + SET_VERSION_INFO(PyLong_FromLong(ver.dwBuildNumber)); + SET_VERSION_INFO(PyLong_FromLong(ver.dwPlatformId)); + SET_VERSION_INFO(PyUnicode_FromWideChar(ver.szCSDVersion, -1)); + SET_VERSION_INFO(PyLong_FromLong(ver.wServicePackMajor)); + SET_VERSION_INFO(PyLong_FromLong(ver.wServicePackMinor)); + SET_VERSION_INFO(PyLong_FromLong(ver.wSuiteMask)); + SET_VERSION_INFO(PyLong_FromLong(ver.wProductType)); realMajor = ver.dwMajorVersion; realMinor = ver.dwMinorVersion; @@ -1512,17 +1523,19 @@ } PyMem_RawFree(verblock); } - PyStructSequence_SET_ITEM(version, pos++, Py_BuildValue("(kkk)", + SET_VERSION_INFO(Py_BuildValue("(kkk)", realMajor, realMinor, realBuild )); - if (PyErr_Occurred()) { - Py_DECREF(version); - return NULL; - } +#undef SET_VERSION_INFO + return version; + +error: + Py_DECREF(version); + return NULL; } #pragma warning(pop) diff -Nru python3.11-3.11.8/README.rst python3.11-3.11.9/README.rst --- python3.11-3.11.8/README.rst 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/README.rst 2024-04-02 08:25:04.000000000 +0000 @@ -1,4 +1,4 @@ -This is Python version 3.11.8 +This is Python version 3.11.9 ============================= .. 
image:: https://github.com/python/cpython/workflows/Tests/badge.svg @@ -158,15 +158,6 @@ `_. -Converting From Python 2.x to 3.x ---------------------------------- - -Significant backward incompatible changes were made for the release of Python -3.0, which may cause programs written for Python 2 to fail when run with Python -3. For more information about porting your code from Python 2 to Python 3, see -the `Porting HOWTO `_. - - Testing ------- diff -Nru python3.11-3.11.8/Tools/c-analyzer/README python3.11-3.11.9/Tools/c-analyzer/README --- python3.11-3.11.8/Tools/c-analyzer/README 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/c-analyzer/README 2024-04-02 08:25:04.000000000 +0000 @@ -11,9 +11,8 @@ * module state * Python runtime state -The ignored-globals.txt file is organized similarly. Of the different -categories, the last two are problematic and generally should not exist -in the codebase. +Of the different categories, the last two are problematic and +generally should not exist in the codebase. Globals that hold module state (i.e. in Modules/*.c) cause problems when multiple interpreters are in use. For more info, see PEP 3121, @@ -42,4 +41,3 @@ If it reports any globals then they should be resolved. If the globals are runtime state then they should be folded into _PyRuntimeState. -Otherwise they should be added to ignored-globals.txt. diff -Nru python3.11-3.11.8/Tools/c-analyzer/cpython/_parser.py python3.11-3.11.9/Tools/c-analyzer/cpython/_parser.py --- python3.11-3.11.8/Tools/c-analyzer/cpython/_parser.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/c-analyzer/cpython/_parser.py 2024-04-02 08:25:04.000000000 +0000 @@ -109,6 +109,7 @@ Modules/_dbmmodule.c Modules/cjkcodecs/_codecs_*.c +Modules/expat/internal.h Modules/expat/xmlrole.c Modules/expat/xmlparse.c Python/initconfig.c diff -Nru python3.11-3.11.8/Tools/c-analyzer/cpython/globals-to-fix.tsv python3.11-3.11.9/Tools/c-analyzer/cpython/globals-to-fix.tsv --- python3.11-3.11.8/Tools/c-analyzer/cpython/globals-to-fix.tsv 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/c-analyzer/cpython/globals-to-fix.tsv 2024-04-02 08:25:04.000000000 +0000 @@ -1273,6 +1273,7 @@ Modules/_decimal/_decimal.c - current_context_var - Modules/_decimal/_decimal.c - default_context_template - Modules/_decimal/_decimal.c - extended_context_template - +Modules/_decimal/_decimal.c - PyDecimal - Modules/_decimal/_decimal.c - round_map - Modules/_decimal/_decimal.c - Rational - Modules/_decimal/_decimal.c - SignalTuple - diff -Nru python3.11-3.11.8/Tools/msi/bundle/Default.wxl python3.11-3.11.9/Tools/msi/bundle/Default.wxl --- python3.11-3.11.8/Tools/msi/bundle/Default.wxl 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/msi/bundle/Default.wxl 2024-04-02 08:25:04.000000000 +0000 @@ -88,6 +88,7 @@ Install Python [ShortVersion] for &all users for &all users (requires admin privileges) Use admin privi&leges when installing py.exe + Python Launcher is already installed &Precompile standard library Download debugging &symbols Download debu&g binaries (requires VS 2017 or later) diff -Nru python3.11-3.11.8/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp python3.11-3.11.9/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp --- python3.11-3.11.8/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp 2024-04-02 08:25:04.000000000 +0000 @@ -442,6 +442,14 @@ 
ThemeControlElevates(_theme, ID_INSTALL_BUTTON, elevated); ThemeControlElevates(_theme, ID_INSTALL_SIMPLE_BUTTON, elevated); ThemeControlElevates(_theme, ID_INSTALL_UPGRADE_BUTTON, elevated); + + LONGLONG blockedLauncher; + if (SUCCEEDED(BalGetNumericVariable(L"BlockedLauncher", &blockedLauncher)) && blockedLauncher) { + LOC_STRING *pLocString = nullptr; + if (SUCCEEDED(LocGetString(_wixLoc, L"#(loc.ShortInstallLauncherBlockedLabel)", &pLocString)) && pLocString) { + ThemeSetTextControl(_theme, ID_INSTALL_LAUNCHER_ALL_USERS_CHECKBOX, pLocString->wzText); + } + } } void Custom1Page_Show() { @@ -718,25 +726,67 @@ __in DWORD64 /*dw64Version*/, __in BOOTSTRAPPER_RELATED_OPERATION operation ) { - if (BOOTSTRAPPER_RELATED_OPERATION_MAJOR_UPGRADE == operation && - (CSTR_EQUAL == ::CompareStringW(LOCALE_NEUTRAL, 0, wzPackageId, -1, L"launcher_AllUsers", -1) || - CSTR_EQUAL == ::CompareStringW(LOCALE_NEUTRAL, 0, wzPackageId, -1, L"launcher_JustForMe", -1))) { - auto hr = LoadAssociateFilesStateFromKey(_engine, fPerMachine ? HKEY_LOCAL_MACHINE : HKEY_CURRENT_USER); - if (hr == S_OK) { - _engine->SetVariableNumeric(L"AssociateFiles", 1); - } else if (hr == S_FALSE) { - _engine->SetVariableNumeric(L"AssociateFiles", 0); - } else if (FAILED(hr)) { - BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Failed to load AssociateFiles state: error code 0x%08X", hr); - } - - LONGLONG includeLauncher; - if (FAILED(BalGetNumericVariable(L"Include_launcher", &includeLauncher)) - || includeLauncher == -1) { - _engine->SetVariableNumeric(L"Include_launcher", 1); - _engine->SetVariableNumeric(L"InstallLauncherAllUsers", fPerMachine ? 1 : 0); + // Only check launcher_AllUsers because we'll find the same packages + // twice if we check launcher_JustForMe as well. + if (CSTR_EQUAL == ::CompareStringW(LOCALE_NEUTRAL, 0, wzPackageId, -1, L"launcher_AllUsers", -1)) { + BalLog(BOOTSTRAPPER_LOG_LEVEL_STANDARD, "Detected existing launcher install"); + + LONGLONG blockedLauncher, detectedLauncher; + if (FAILED(BalGetNumericVariable(L"BlockedLauncher", &blockedLauncher))) { + blockedLauncher = 0; + } + + // Get the prior DetectedLauncher value so we can see if we've + // detected more than one, and then update the stored variable + // (we use the original value later on via the local). + if (FAILED(BalGetNumericVariable(L"DetectedLauncher", &detectedLauncher))) { + detectedLauncher = 0; + } + if (!detectedLauncher) { + _engine->SetVariableNumeric(L"DetectedLauncher", 1); + } + + if (blockedLauncher) { + // Nothing else to do, we're already blocking + } + else if (BOOTSTRAPPER_RELATED_OPERATION_DOWNGRADE == operation) { + // Found a higher version, so we can't install ours. + BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Higher version launcher has been detected."); + BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Launcher will not be installed"); + _engine->SetVariableNumeric(L"BlockedLauncher", 1); + } + else if (detectedLauncher) { + if (!blockedLauncher) { + BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Multiple launcher installs have been detected."); + BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "No launcher will be installed or upgraded until one has been removed."); + _engine->SetVariableNumeric(L"BlockedLauncher", 1); + } + } + else if (BOOTSTRAPPER_RELATED_OPERATION_MAJOR_UPGRADE == operation) { + // Found an older version, so let's run the equivalent as an upgrade + // This overrides "unknown" all users options, but will leave alone + // any that have already been set/detected. 
+ // User can deselect the option to include the launcher, but cannot + // change it from the current per user/machine setting. + LONGLONG includeLauncher, includeLauncherAllUsers; + if (FAILED(BalGetNumericVariable(L"Include_launcher", &includeLauncher))) { + includeLauncher = -1; + } + if (FAILED(BalGetNumericVariable(L"InstallLauncherAllUsers", &includeLauncherAllUsers))) { + includeLauncherAllUsers = -1; + } + + if (includeLauncher < 0) { + _engine->SetVariableNumeric(L"Include_launcher", 1); + } + if (includeLauncherAllUsers < 0) { + _engine->SetVariableNumeric(L"InstallLauncherAllUsers", fPerMachine ? 1 : 0); + } else if (includeLauncherAllUsers != fPerMachine ? 1 : 0) { + // Requested AllUsers option is inconsistent, so block + _engine->SetVariableNumeric(L"BlockedLauncher", 1); + } + _engine->SetVariableNumeric(L"DetectedOldLauncher", 1); } - _engine->SetVariableNumeric(L"DetectedOldLauncher", 1); } return CheckCanceled() ? IDCANCEL : IDNOACTION; } @@ -784,48 +834,7 @@ __in LPCWSTR wzPackageId, __in HRESULT hrStatus, __in BOOTSTRAPPER_PACKAGE_STATE state - ) { - if (FAILED(hrStatus)) { - return; - } - - BOOL detectedLauncher = FALSE; - HKEY hkey = HKEY_LOCAL_MACHINE; - if (CSTR_EQUAL == ::CompareStringW(LOCALE_NEUTRAL, 0, wzPackageId, -1, L"launcher_AllUsers", -1)) { - if (BOOTSTRAPPER_PACKAGE_STATE_PRESENT == state || BOOTSTRAPPER_PACKAGE_STATE_OBSOLETE == state) { - detectedLauncher = TRUE; - _engine->SetVariableNumeric(L"InstallLauncherAllUsers", 1); - } - } else if (CSTR_EQUAL == ::CompareStringW(LOCALE_NEUTRAL, 0, wzPackageId, -1, L"launcher_JustForMe", -1)) { - if (BOOTSTRAPPER_PACKAGE_STATE_PRESENT == state || BOOTSTRAPPER_PACKAGE_STATE_OBSOLETE == state) { - detectedLauncher = TRUE; - _engine->SetVariableNumeric(L"InstallLauncherAllUsers", 0); - } - } - - LONGLONG includeLauncher; - if (SUCCEEDED(BalGetNumericVariable(L"Include_launcher", &includeLauncher)) - && includeLauncher != -1) { - detectedLauncher = FALSE; - } - - if (detectedLauncher) { - /* When we detect the current version of the launcher. */ - _engine->SetVariableNumeric(L"Include_launcher", 1); - _engine->SetVariableNumeric(L"DetectedLauncher", 1); - _engine->SetVariableString(L"Include_launcherState", L"disable"); - _engine->SetVariableString(L"InstallLauncherAllUsersState", L"disable"); - - auto hr = LoadAssociateFilesStateFromKey(_engine, hkey); - if (hr == S_OK) { - _engine->SetVariableNumeric(L"AssociateFiles", 1); - } else if (hr == S_FALSE) { - _engine->SetVariableNumeric(L"AssociateFiles", 0); - } else if (FAILED(hr)) { - BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Failed to load AssociateFiles state: error code 0x%08X", hr); - } - } - } + ) { } virtual STDMETHODIMP_(void) OnDetectComplete(__in HRESULT hrStatus) { @@ -835,19 +844,68 @@ } if (SUCCEEDED(hrStatus)) { - LONGLONG includeLauncher; - if (SUCCEEDED(BalGetNumericVariable(L"Include_launcher", &includeLauncher)) - && includeLauncher == -1) { - if (BOOTSTRAPPER_ACTION_LAYOUT == _command.action || - (BOOTSTRAPPER_ACTION_INSTALL == _command.action && !_upgrading)) { - // When installing/downloading, we want to include the launcher - // by default. 
- _engine->SetVariableNumeric(L"Include_launcher", 1); - } else { - // Any other action, if we didn't detect the MSI then we want to - // keep it excluded - _engine->SetVariableNumeric(L"Include_launcher", 0); - _engine->SetVariableNumeric(L"AssociateFiles", 0); + // Update launcher install states + // If we didn't detect any existing installs, Include_launcher and + // InstallLauncherAllUsers will both be -1, so we will set to their + // defaults and leave the options enabled. + // Otherwise, if we detected an existing install, we disable the + // options so they remain fixed. + // The code in OnDetectRelatedMsiPackage is responsible for figuring + // out whether existing installs are compatible with the settings in + // place during detection. + LONGLONG blockedLauncher; + if (SUCCEEDED(BalGetNumericVariable(L"BlockedLauncher", &blockedLauncher)) + && blockedLauncher) { + _engine->SetVariableNumeric(L"Include_launcher", 0); + _engine->SetVariableNumeric(L"InstallLauncherAllUsers", 0); + _engine->SetVariableString(L"InstallLauncherAllUsersState", L"disable"); + _engine->SetVariableString(L"Include_launcherState", L"disable"); + } + else { + LONGLONG includeLauncher, includeLauncherAllUsers, associateFiles; + + if (FAILED(BalGetNumericVariable(L"Include_launcher", &includeLauncher))) { + includeLauncher = -1; + } + if (FAILED(BalGetNumericVariable(L"InstallLauncherAllUsers", &includeLauncherAllUsers))) { + includeLauncherAllUsers = -1; + } + if (FAILED(BalGetNumericVariable(L"AssociateFiles", &associateFiles))) { + associateFiles = -1; + } + + if (includeLauncherAllUsers < 0) { + // Python 3.11 installer defaults to 1. Later versions use 0 + includeLauncherAllUsers = 1; + _engine->SetVariableNumeric(L"InstallLauncherAllUsers", includeLauncherAllUsers); + } + + if (includeLauncher < 0) { + if (BOOTSTRAPPER_ACTION_LAYOUT == _command.action || + (BOOTSTRAPPER_ACTION_INSTALL == _command.action && !_upgrading)) { + // When installing/downloading, we include the launcher + // (though downloads should ignore this setting anyway) + _engine->SetVariableNumeric(L"Include_launcher", 1); + } else { + // Any other action, we should have detected an existing + // install (e.g. on remove/modify), so if we didn't, we + // assume it's not selected. + _engine->SetVariableNumeric(L"Include_launcher", 0); + _engine->SetVariableNumeric(L"AssociateFiles", 0); + } + } + + if (associateFiles < 0) { + auto hr = LoadAssociateFilesStateFromKey( + _engine, + includeLauncherAllUsers ? HKEY_LOCAL_MACHINE : HKEY_CURRENT_USER + ); + if (FAILED(hr)) { + BalLog(BOOTSTRAPPER_LOG_LEVEL_ERROR, "Failed to load AssociateFiles state: error code 0x%08X", hr); + } else if (hr == S_OK) { + associateFiles = 1; + } + _engine->SetVariableNumeric(L"AssociateFiles", associateFiles); } } } diff -Nru python3.11-3.11.8/Tools/msi/bundle/bundle.wxs python3.11-3.11.9/Tools/msi/bundle/bundle.wxs --- python3.11-3.11.8/Tools/msi/bundle/bundle.wxs 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/msi/bundle/bundle.wxs 2024-04-02 08:25:04.000000000 +0000 @@ -28,10 +28,11 @@ - + - + + @@ -84,10 +85,11 @@ + - + diff -Nru python3.11-3.11.8/Tools/scripts/sortperf.py python3.11-3.11.9/Tools/scripts/sortperf.py --- python3.11-3.11.8/Tools/scripts/sortperf.py 1970-01-01 00:00:00.000000000 +0000 +++ python3.11-3.11.9/Tools/scripts/sortperf.py 2024-04-02 08:25:04.000000000 +0000 @@ -0,0 +1,197 @@ +""" +List sort performance test. 
+ +To install `pyperf` you would need to: + + python3 -m pip install pyperf + +To run: + + python3 Tools/scripts/sortperf + +Options: + + * `benchmark` name to run + * `--rnd-seed` to set random seed + * `--size` to set the sorted list size + +Based on https://github.com/python/cpython/blob/963904335e579bfe39101adf3fd6a0cf705975ff/Lib/test/sortperf.py +""" + +from __future__ import annotations + +import argparse +import time +import random + + +# =============== +# Data generation +# =============== + +def _random_data(size: int, rand: random.Random) -> list[float]: + result = [rand.random() for _ in range(size)] + # Shuffle it a bit... + for i in range(10): + i = rand.randrange(size) + temp = result[:i] + del result[:i] + temp.reverse() + result.extend(temp) + del temp + assert len(result) == size + return result + + +def list_sort(size: int, rand: random.Random) -> list[float]: + return _random_data(size, rand) + + +def list_sort_descending(size: int, rand: random.Random) -> list[float]: + return list(reversed(list_sort_ascending(size, rand))) + + +def list_sort_ascending(size: int, rand: random.Random) -> list[float]: + return sorted(_random_data(size, rand)) + + +def list_sort_ascending_exchanged(size: int, rand: random.Random) -> list[float]: + result = list_sort_ascending(size, rand) + # Do 3 random exchanges. + for _ in range(3): + i1 = rand.randrange(size) + i2 = rand.randrange(size) + result[i1], result[i2] = result[i2], result[i1] + return result + + +def list_sort_ascending_random(size: int, rand: random.Random) -> list[float]: + assert size >= 10, "This benchmark requires size to be >= 10" + result = list_sort_ascending(size, rand) + # Replace the last 10 with random floats. + result[-10:] = [rand.random() for _ in range(10)] + return result + + +def list_sort_ascending_one_percent(size: int, rand: random.Random) -> list[float]: + result = list_sort_ascending(size, rand) + # Replace 1% of the elements at random. + for _ in range(size // 100): + result[rand.randrange(size)] = rand.random() + return result + + +def list_sort_duplicates(size: int, rand: random.Random) -> list[float]: + assert size >= 4 + result = list_sort_ascending(4, rand) + # Arrange for lots of duplicates. + result = result * (size // 4) + # Force the elements to be distinct objects, else timings can be + # artificially low. + return list(map(abs, result)) + + +def list_sort_equal(size: int, rand: random.Random) -> list[float]: + # All equal. Again, force the elements to be distinct objects. + return list(map(abs, [-0.519012] * size)) + + +def list_sort_worst_case(size: int, rand: random.Random) -> list[float]: + # This one looks like [3, 2, 1, 0, 0, 1, 2, 3]. It was a bad case + # for an older implementation of quicksort, which used the median + # of the first, last and middle elements as the pivot. + half = size // 2 + result = list(range(half - 1, -1, -1)) + result.extend(range(half)) + # Force to float, so that the timings are comparable. This is + # significantly faster if we leave them as ints. + return list(map(float, result)) + + +# ========= +# Benchmark +# ========= + +class Benchmark: + def __init__(self, name: str, size: int, seed: int) -> None: + self._name = name + self._size = size + self._seed = seed + self._random = random.Random(self._seed) + + def run(self, loops: int) -> float: + all_data = self._prepare_data(loops) + start = time.perf_counter() + + for data in all_data: + data.sort() # Benching this method! 
+ + return time.perf_counter() - start + + def _prepare_data(self, loops: int) -> list[float]: + bench = BENCHMARKS[self._name] + data = bench(self._size, self._random) + return [data.copy() for _ in range(loops)] + + +def add_cmdline_args(cmd: list[str], args) -> None: + if args.benchmark: + cmd.append(args.benchmark) + cmd.append(f"--size={args.size}") + cmd.append(f"--rng-seed={args.rng_seed}") + + +def add_parser_args(parser: argparse.ArgumentParser) -> None: + parser.add_argument( + "benchmark", + choices=BENCHMARKS, + nargs="?", + help="Can be any of: {0}".format(", ".join(BENCHMARKS)), + ) + parser.add_argument( + "--size", + type=int, + default=DEFAULT_SIZE, + help=f"Size of the lists to sort (default: {DEFAULT_SIZE})", + ) + parser.add_argument( + "--rng-seed", + type=int, + default=DEFAULT_RANDOM_SEED, + help=f"Random number generator seed (default: {DEFAULT_RANDOM_SEED})", + ) + + +DEFAULT_SIZE = 1 << 14 +DEFAULT_RANDOM_SEED = 0 +BENCHMARKS = { + "list_sort": list_sort, + "list_sort_descending": list_sort_descending, + "list_sort_ascending": list_sort_ascending, + "list_sort_ascending_exchanged": list_sort_ascending_exchanged, + "list_sort_ascending_random": list_sort_ascending_random, + "list_sort_ascending_one_percent": list_sort_ascending_one_percent, + "list_sort_duplicates": list_sort_duplicates, + "list_sort_equal": list_sort_equal, + "list_sort_worst_case": list_sort_worst_case, +} + +if __name__ == "__main__": + # This needs `pyperf` 3rd party library: + import pyperf + + runner = pyperf.Runner(add_cmdline_args=add_cmdline_args) + add_parser_args(runner.argparser) + args = runner.parse_args() + + runner.metadata["description"] = "Test `list.sort()` with different data" + runner.metadata["list_sort_size"] = args.size + runner.metadata["list_sort_random_seed"] = args.rng_seed + + if args.benchmark: + benchmarks = (args.benchmark,) + else: + benchmarks = sorted(BENCHMARKS) + for bench in benchmarks: + benchmark = Benchmark(bench, args.size, args.rng_seed) + runner.bench_time_func(bench, benchmark.run) diff -Nru python3.11-3.11.8/Tools/scripts/stable_abi.py python3.11-3.11.9/Tools/scripts/stable_abi.py --- python3.11-3.11.8/Tools/scripts/stable_abi.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/scripts/stable_abi.py 2024-04-02 08:25:04.000000000 +0000 @@ -599,7 +599,7 @@ if name.startswith('_') and not item.abi_only: raise ValueError( f'`{name}` is private (underscore-prefixed) and should be ' - + 'removed from the stable ABI list or or marked `abi_only`') + + 'removed from the stable ABI list or marked `abi_only`') def check_dump(manifest, filename): """Check that manifest.dump() corresponds to the data. diff -Nru python3.11-3.11.8/Tools/wasm/config.site-wasm32-wasi python3.11-3.11.9/Tools/wasm/config.site-wasm32-wasi --- python3.11-3.11.8/Tools/wasm/config.site-wasm32-wasi 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/wasm/config.site-wasm32-wasi 2024-04-02 08:25:04.000000000 +0000 @@ -43,3 +43,12 @@ # Disable int-conversion for wask-sdk as it triggers an error from version 17. ac_cv_disable_int_conversion=yes + +# preadv(), readv(), pwritev(), and writev() under wasmtime's WASI 0.2 support +# do not use more than the first buffer provided, failing under test_posix. +# Since wasmtime will not be changing this behaviour, disable the functions. 
+# https://github.com/bytecodealliance/wasmtime/issues/7830 +ac_cv_func_preadv=no +ac_cv_func_readv=no +ac_cv_func_pwritev=no +ac_cv_func_writev=no diff -Nru python3.11-3.11.8/Tools/wasm/wasi-env python3.11-3.11.9/Tools/wasm/wasi-env --- python3.11-3.11.8/Tools/wasm/wasi-env 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/wasm/wasi-env 2024-04-02 08:25:04.000000000 +0000 @@ -55,7 +55,6 @@ CXX="ccache ${CXX}" fi -LDSHARED="${WASI_SDK_PATH}/bin/wasm-ld" AR="${WASI_SDK_PATH}/bin/llvm-ar" RANLIB="${WASI_SDK_PATH}/bin/ranlib" diff -Nru python3.11-3.11.8/Tools/wasm/wasm_build.py python3.11-3.11.9/Tools/wasm/wasm_build.py --- python3.11-3.11.8/Tools/wasm/wasm_build.py 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/Tools/wasm/wasm_build.py 2024-04-02 08:25:04.000000000 +0000 @@ -316,8 +316,10 @@ # workaround for https://github.com/python/cpython/issues/95952 "HOSTRUNNER": ( "wasmtime run " - "--env PYTHONPATH=/{relbuilddir}/build/lib.wasi-wasm32-{version}:/Lib " - "--mapdir /::{srcdir} --" + "--wasm max-wasm-stack=8388608 " + "--wasi preview2 " + "--dir {srcdir}::/ " + "--env PYTHONPATH=/{relbuilddir}/build/lib.wasi-wasm32-{version}:/Lib" ), "PATH": [WASI_SDK_PATH / "bin", os.environ["PATH"]], }, diff -Nru python3.11-3.11.8/configure python3.11-3.11.9/configure --- python3.11-3.11.8/configure 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/configure 2024-04-02 08:25:04.000000000 +0000 @@ -6961,7 +6961,7 @@ fi ;; #( WASI/*) : - HOSTRUNNER='wasmtime run --env PYTHONPATH=/$(shell realpath --relative-to $(abs_srcdir) $(abs_builddir))/$(shell cat pybuilddir.txt) --mapdir /::$(srcdir) --' ;; #( + HOSTRUNNER='wasmtime run --wasm max-wasm-stack=8388608 --wasi preview2 --env PYTHONPATH=/$(shell realpath --relative-to $(abs_srcdir) $(abs_builddir))/$(shell cat pybuilddir.txt):/Lib --dir $(srcdir)::/' ;; #( *) : HOSTRUNNER='' ;; diff -Nru python3.11-3.11.8/configure.ac python3.11-3.11.9/configure.ac --- python3.11-3.11.8/configure.ac 2024-02-06 21:21:21.000000000 +0000 +++ python3.11-3.11.9/configure.ac 2024-04-02 08:25:04.000000000 +0000 @@ -1580,7 +1580,7 @@ dnl TODO: support other WASI runtimes dnl wasmtime starts the proces with "/" as CWD. For OOT builds add the dnl directory containing _sysconfigdata to PYTHONPATH. - [WASI/*], [HOSTRUNNER='wasmtime run --env PYTHONPATH=/$(shell realpath --relative-to $(abs_srcdir) $(abs_builddir))/$(shell cat pybuilddir.txt) --mapdir /::$(srcdir) --'], + [WASI/*], [HOSTRUNNER='wasmtime run --wasm max-wasm-stack=8388608 --wasi preview2 --env PYTHONPATH=/$(shell realpath --relative-to $(abs_srcdir) $(abs_builddir))/$(shell cat pybuilddir.txt):/Lib --dir $(srcdir)::/'], [HOSTRUNNER=''] ) fi diff -Nru python3.11-3.11.8/debian/changelog python3.11-3.11.9/debian/changelog --- python3.11-3.11.8/debian/changelog 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/changelog 2024-04-06 17:59:24.000000000 +0000 @@ -1,3 +1,9 @@ +python3.11 (3.11.9-1+focal1) focal; urgency=medium + + * Python 3.11.9 release. + + -- Anthony Sottile (deadsnakes) Sat, 06 Apr 2024 17:59:24 +0000 + python3.11 (3.11.8-1+focal2) focal; urgency=medium * Update idlelib's NEWS filename. 
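The Windows installer hunks above (Tools/msi/bundle/bootstrap/PythonBootstrapperApplication.cpp) replace the old per-package launcher detection with a single decision flow driven by the BlockedLauncher, DetectedLauncher, Include_launcher and InstallLauncherAllUsers bundle variables. The sketch below is a rough Python paraphrase of that flow for readability only; the variable names come from the diff, but the dict-based handling of "unset" values is a simplification of the bundle's -1 convention, not the authoritative C++ implementation.

def on_detect_related_msi(burn_vars: dict, operation: str, per_machine: bool) -> None:
    # Rough paraphrase of OnDetectRelatedMsiPackage for "launcher_AllUsers"
    # packages (the diff skips "launcher_JustForMe" to avoid counting the
    # same install twice).
    previously_detected = burn_vars.get("DetectedLauncher", 0)
    burn_vars["DetectedLauncher"] = 1

    if burn_vars.get("BlockedLauncher", 0):
        return  # already blocking, nothing else to do
    if operation == "downgrade":
        # A newer launcher is installed, so this one must not be installed.
        burn_vars["BlockedLauncher"] = 1
    elif previously_detected:
        # More than one launcher install detected: block until one is removed.
        burn_vars["BlockedLauncher"] = 1
    elif operation == "major_upgrade":
        # An older launcher was found: treat it as an upgrade, keep any choices
        # already made, and block if the requested AllUsers option conflicts
        # with the detected install.
        burn_vars.setdefault("Include_launcher", 1)
        requested_all_users = burn_vars.get("InstallLauncherAllUsers")
        if requested_all_users is None:
            burn_vars["InstallLauncherAllUsers"] = 1 if per_machine else 0
        elif requested_all_users != (1 if per_machine else 0):
            burn_vars["BlockedLauncher"] = 1
        burn_vars["DetectedOldLauncher"] = 1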
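The WASI hunks above (Tools/wasm/wasm_build.py, configure, configure.ac) all switch the host runner to the newer wasmtime CLI: a larger --wasm max-wasm-stack, --wasi preview2, a --dir host::guest mapping in place of --mapdir, and /Lib appended to PYTHONPATH. Below is a minimal sketch of the resulting command as it might be assembled and run from Python; the source and build directory values are placeholders, not paths taken from the diff.

import shlex
import subprocess

# Placeholder values; in the real build these come from configure/Makefile variables.
srcdir = "/path/to/cpython"        # $(srcdir)
relbuilddir = "builddir/wasi"      # build dir relative to the source tree
version = "3.11"

hostrunner = (
    "wasmtime run "
    "--wasm max-wasm-stack=8388608 "
    "--wasi preview2 "
    f"--dir {srcdir}::/ "
    f"--env PYTHONPATH=/{relbuilddir}/build/lib.wasi-wasm32-{version}:/Lib"
)

# Run the WASI build of the interpreter under wasmtime (sketch only; needs
# wasmtime on PATH and a completed wasm32-wasi build producing python.wasm).
cmd = shlex.split(hostrunner) + ["python.wasm", "-c", "import sys; print(sys.version)"]
subprocess.run(cmd, check=False)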
diff -Nru python3.11-3.11.8/debian/patches/0028-sysconfigdata-name.patch python3.11-3.11.9/debian/patches/0028-sysconfigdata-name.patch --- python3.11-3.11.8/debian/patches/0028-sysconfigdata-name.patch 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/0028-sysconfigdata-name.patch 2024-04-06 17:59:24.000000000 +0000 @@ -28,10 +28,10 @@ diff --git a/Makefile.pre.in b/Makefile.pre.in -index 6e8113e..d688a05 100644 +index 68f499c..645eb49 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in -@@ -2135,8 +2135,10 @@ libinstall: all $(srcdir)/Modules/xxmodule.c +@@ -2140,8 +2140,10 @@ libinstall: all $(srcdir)/Modules/xxmodule.c esac; \ done; \ done @@ -44,7 +44,7 @@ $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt if test -d $(DESTDIR)$(LIBDEST)/distutils/tests; then \ $(INSTALL_DATA) $(srcdir)/Modules/xxmodule.c \ -@@ -2295,6 +2297,7 @@ sharedinstall: all +@@ -2300,6 +2302,7 @@ sharedinstall: all --install-scripts=$(BINDIR) \ --install-platlib=$(DESTSHARED) \ --root=$(DESTDIR)/ @@ -53,7 +53,7 @@ -rm -r $(DESTDIR)$(DESTSHARED)/__pycache__ diff --git a/configure.ac b/configure.ac -index 3e8544d..882270c 100644 +index aff818e..45a2146 100644 --- a/configure.ac +++ b/configure.ac @@ -162,7 +162,7 @@ AC_ARG_WITH( diff -Nru python3.11-3.11.8/debian/patches/0029-destshared-location.patch python3.11-3.11.9/debian/patches/0029-destshared-location.patch --- python3.11-3.11.8/debian/patches/0029-destshared-location.patch 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/0029-destshared-location.patch 2024-04-06 17:59:24.000000000 +0000 @@ -12,7 +12,7 @@ 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile.pre.in b/Makefile.pre.in -index d688a05..e8dbef1 100644 +index 645eb49..3c3038b 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -165,7 +165,7 @@ EXT_SUFFIX= @EXT_SUFFIX@ diff -Nru python3.11-3.11.8/debian/patches/argparse-no-shutil.diff python3.11-3.11.9/debian/patches/argparse-no-shutil.diff --- python3.11-3.11.8/debian/patches/argparse-no-shutil.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/argparse-no-shutil.diff 2024-04-06 17:59:24.000000000 +0000 @@ -7,7 +7,7 @@ 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/Lib/argparse.py b/Lib/argparse.py -index 85a592e..13f3456 100644 +index cc7f84e..6435549 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -178,9 +178,12 @@ class HelpFormatter(object): diff -Nru python3.11-3.11.8/debian/patches/deb-locations.diff python3.11-3.11.9/debian/patches/deb-locations.diff --- python3.11-3.11.8/debian/patches/deb-locations.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/deb-locations.diff 2024-04-06 17:59:24.000000000 +0000 @@ -10,7 +10,7 @@ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/Lib/pydoc.py b/Lib/pydoc.py -index 14b9903..ef35473 100755 +index a8cfeaf..660b345 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -31,6 +31,10 @@ to a file named ".html". 
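The debian/patches/* hunks in this part of the diff are mechanical refreshes: only the recorded index hashes and hunk offsets change so that the quilt patches still apply cleanly to the 3.11.9 tree. One way to get a first-pass sanity check of such a refresh, sketched here with a hypothetical source tree path, is a dry run of patch(1) against the unpacked upstream sources; note that patches later in the series can depend on earlier ones, so a reject here only flags a candidate for closer inspection with quilt.

import pathlib
import subprocess

# Hypothetical location; adjust to wherever the unpacked 3.11.9 tree lives.
source_tree = pathlib.Path("python3.11-3.11.9")
patch_dir = source_tree / "debian" / "patches"

for patch in sorted(patch_dir.glob("*.diff")) + sorted(patch_dir.glob("*.patch")):
    # --dry-run reports fuzz or rejects without touching the tree.
    result = subprocess.run(
        ["patch", "--dry-run", "-p1", "-i", str(patch.resolve())],
        cwd=source_tree,
        capture_output=True,
        text=True,
    )
    status = "ok" if result.returncode == 0 else "NEEDS A LOOK"
    print(f"{patch.name}: {status}")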
diff -Nru python3.11-3.11.8/debian/patches/disable-sem-check.diff python3.11-3.11.9/debian/patches/disable-sem-check.diff --- python3.11-3.11.8/debian/patches/disable-sem-check.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/disable-sem-check.diff 2024-04-06 17:59:24.000000000 +0000 @@ -9,7 +9,7 @@ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/configure.ac b/configure.ac -index 2a562fa..3e8544d 100644 +index f72d9c0..aff818e 100644 --- a/configure.ac +++ b/configure.ac @@ -5546,12 +5546,17 @@ AC_CACHE_CHECK([whether POSIX semaphores are enabled], [ac_cv_posix_semaphores_e diff -Nru python3.11-3.11.8/debian/patches/distutils-install-layout.diff python3.11-3.11.9/debian/patches/distutils-install-layout.diff --- python3.11-3.11.8/debian/patches/distutils-install-layout.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/distutils-install-layout.diff 2024-04-06 17:59:24.000000000 +0000 @@ -207,16 +207,16 @@ def test_debug_mode(self): diff --git a/Lib/pydoc.py b/Lib/pydoc.py -index ef35473..e5e800b 100755 +index 660b345..8a38c74 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py -@@ -512,6 +512,7 @@ class Doc: +@@ -527,6 +527,7 @@ class Doc: 'marshal', 'posix', 'signal', 'sys', '_thread', 'zipimport') or (file.startswith(basedir) and + not file.startswith(os.path.join(basedir, 'dist-packages')) and not file.startswith(os.path.join(basedir, 'site-packages')))) and - object.__name__ not in ('xml.etree', 'test.pydoc_mod')): + object.__name__ not in ('xml.etree', 'test.test_pydoc.pydoc_mod')): if docloc.startswith(("http://", "https://")): diff --git a/Lib/site.py b/Lib/site.py index 2904e44..4b63262 100644 diff -Nru python3.11-3.11.8/debian/patches/lib-argparse.diff python3.11-3.11.9/debian/patches/lib-argparse.diff --- python3.11-3.11.8/debian/patches/lib-argparse.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/lib-argparse.diff 2024-04-06 17:59:24.000000000 +0000 @@ -9,7 +9,7 @@ 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/Lib/argparse.py b/Lib/argparse.py -index a999ea6..85a592e 100644 +index 2f5ecda..cc7f84e 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -91,7 +91,16 @@ import sys as _sys diff -Nru python3.11-3.11.8/debian/patches/link-opt.diff python3.11-3.11.9/debian/patches/link-opt.diff --- python3.11-3.11.8/debian/patches/link-opt.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/link-opt.diff 2024-04-06 17:59:24.000000000 +0000 @@ -9,7 +9,7 @@ 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/configure.ac b/configure.ac -index bbe7f89..2a562fa 100644 +index 7b4000f..f72d9c0 100644 --- a/configure.ac +++ b/configure.ac @@ -3183,8 +3183,8 @@ then diff -Nru python3.11-3.11.8/debian/patches/multiarch.diff python3.11-3.11.9/debian/patches/multiarch.diff --- python3.11-3.11.8/debian/patches/multiarch.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/multiarch.diff 2024-04-06 17:59:24.000000000 +0000 @@ -40,7 +40,7 @@ srcdir = _CONFIG_VARS.get('srcdir', _PROJECT_BASE) if os.name == 'posix': diff --git a/Makefile.pre.in b/Makefile.pre.in -index 4d1921b..9219fb8 100644 +index 81d4d50..03ef307 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1273,6 +1273,7 @@ Modules/signalmodule.o: $(srcdir)/Modules/signalmodule.c $(srcdir)/Modules/posix diff -Nru python3.11-3.11.8/debian/patches/pydoc-use-pager.diff python3.11-3.11.9/debian/patches/pydoc-use-pager.diff --- 
python3.11-3.11.8/debian/patches/pydoc-use-pager.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/pydoc-use-pager.diff 2024-04-06 17:59:24.000000000 +0000 @@ -9,10 +9,10 @@ 1 file changed, 2 insertions(+) diff --git a/Lib/pydoc.py b/Lib/pydoc.py -index e5e800b..bfa3e58 100755 +index 8a38c74..83a027f 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py -@@ -1582,6 +1582,8 @@ def getpager(): +@@ -1665,6 +1665,8 @@ def getpager(): return plainpager if sys.platform == 'win32': return lambda text: tempfilepager(plain(text), 'more <') diff -Nru python3.11-3.11.8/debian/patches/reproducible-buildinfo.diff python3.11-3.11.9/debian/patches/reproducible-buildinfo.diff --- python3.11-3.11.8/debian/patches/reproducible-buildinfo.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/reproducible-buildinfo.diff 2024-04-06 17:59:24.000000000 +0000 @@ -9,7 +9,7 @@ 1 file changed, 2 insertions(+) diff --git a/Makefile.pre.in b/Makefile.pre.in -index 9219fb8..6e8113e 100644 +index 03ef307..68f499c 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1243,6 +1243,8 @@ Modules/getbuildinfo.o: $(PARSER_OBJS) \ diff -Nru python3.11-3.11.8/debian/patches/tkinter-import.diff python3.11-3.11.9/debian/patches/tkinter-import.diff --- python3.11-3.11.8/debian/patches/tkinter-import.diff 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/patches/tkinter-import.diff 2024-04-06 17:59:24.000000000 +0000 @@ -9,7 +9,7 @@ 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py -index 28df96a..cecf021 100644 +index 254f63f..29d6e1e 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -35,7 +35,10 @@ import enum diff -Nru python3.11-3.11.8/debian/rules python3.11-3.11.9/debian/rules --- python3.11-3.11.8/debian/rules 2024-02-25 16:41:26.000000000 +0000 +++ python3.11-3.11.9/debian/rules 2024-04-06 17:59:24.000000000 +0000 @@ -83,7 +83,7 @@ else echo Unknown; fi) VER=3.11 -SVER=3.11.8 +SVER=3.11.9 NVER=3.12 PVER=python$(VER) EXT_VER=$(subst .,,$(VER))
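Tools/scripts/sortperf.py, added earlier in this diff, drives its benchmarks through pyperf. For a quick look at what one of those benchmarks measures without installing pyperf, here is a simplified standalone sketch that rebuilds the data shape of list_sort_ascending_exchanged and times a single list.sort() call; the size and seed mirror the script's defaults (DEFAULT_SIZE, DEFAULT_RANDOM_SEED).

import random
import time

size = 1 << 14
rand = random.Random(0)

# Ascending floats with three random exchanges, as in list_sort_ascending_exchanged
# (the script shuffles before sorting; generating sorted data directly is an
# equivalent shortcut for this sketch).
data = sorted(rand.random() for _ in range(size))
for _ in range(3):
    i1 = rand.randrange(size)
    i2 = rand.randrange(size)
    data[i1], data[i2] = data[i2], data[i1]

start = time.perf_counter()
data.sort()
print(f"sorted {size} floats in {time.perf_counter() - start:.6f} seconds")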