diff --git a/.github/workflows/create-wheels.yaml b/.github/workflows/create-wheels.yaml index a82642075..2a469dee4 100644 --- a/.github/workflows/create-wheels.yaml +++ b/.github/workflows/create-wheels.yaml @@ -29,7 +29,8 @@ jobs: - "3.8" - "3.9" - "3.10" - - "3.11.0-rc - 3.11" + - "3.11" + - "3.12" architecture: - x64 @@ -99,6 +100,7 @@ jobs: - cp39-cp39 - cp310-cp310 - cp311-cp311 + - cp312-cp312 architecture: - x64 @@ -230,6 +232,7 @@ jobs: - cp39-cp39 - cp310-cp310 - cp311-cp311 + - cp312-cp312 architecture: - aarch64 - s390x diff --git a/.github/workflows/mintest.yaml b/.github/workflows/mintest.yaml index 6fbe2c640..cf2ff0d78 100644 --- a/.github/workflows/mintest.yaml +++ b/.github/workflows/mintest.yaml @@ -18,6 +18,7 @@ jobs: - "3.8" - "3.10" - "3.11" + - "3.12" steps: - name: Checkout repo @@ -31,7 +32,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install -U tox + pip install -U setuptools tox wheel python --version pip --version tox --version diff --git a/.github/workflows/tests-mailman.yaml b/.github/workflows/tests-mailman.yaml index 2243bcce4..60fd7691a 100644 --- a/.github/workflows/tests-mailman.yaml +++ b/.github/workflows/tests-mailman.yaml @@ -3,9 +3,12 @@ name: Run tests (GNU Mailman 3) on: # Trigger the workflow on master but also allow it to run manually. workflow_dispatch: - push: - branches: - - master + + # NOTE(vytas): Disabled as it is failing as of 2023-09. + # Maybe @maxking just needs to update the Docker image (?) + # push: + # branches: + # - master jobs: run_tox: diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 398fd0046..6a0ddbe40 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -76,6 +76,12 @@ jobs: - python-version: "3.11" os: ubuntu-latest toxenv: py311_cython + - python-version: "3.12" + os: ubuntu-latest + toxenv: py312 + - python-version: "3.12" + os: ubuntu-latest + toxenv: py312_cython - python-version: "3.10" os: macos-latest toxenv: py310_nocover diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000..5d3177879 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,21 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.10" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py + +# We recommend specifying your dependencies to enable reproducible builds: +# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html +python: + install: + - requirements: requirements/docs diff --git a/AUTHORS b/AUTHORS index 96702c6a0..fece25507 100644 --- a/AUTHORS +++ b/AUTHORS @@ -131,6 +131,7 @@ listed below by date of first contribution: * John G G (john-g-g) * Aryan Iyappan (aryaniyaps) * Eujin Ong (euj1n0ng) +* Libor Jelínek (liborjelinek) (et al.) diff --git a/docs/_newsfragments/1977.breakingchange.rst b/docs/_newsfragments/1977.breakingchange.rst new file mode 100644 index 000000000..ff6b9c1c3 --- /dev/null +++ b/docs/_newsfragments/1977.breakingchange.rst @@ -0,0 +1,4 @@ +Previously, it was possible to create an :class:`~falcon.App` with the +``cors_enable`` option, and add additional :class:`~falcon.CORSMiddleware`, +leading to unexpected behavior and dysfunctional CORS. This combination now +explicitly results in a :class:`ValueError`. 
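A minimal illustrative sketch of the behavior described in the newsfragment above (not part of the patch itself); it assumes only the public ``falcon.App`` and ``falcon.CORSMiddleware`` names referenced in the fragment, and the error message quoted in the comment is the one added to ``falcon/app.py`` later in this patch:

    import falcon

    # With this change, cors_enable=True already provisions a CORSMiddleware
    # instance, so passing another one explicitly is rejected at construction time.
    try:
        falcon.App(cors_enable=True, middleware=falcon.CORSMiddleware())
    except ValueError as ex:
        # "CORSMiddleware is not allowed in conjunction with cors_enable
        #  (which already constructs one instance)"
        print(ex)
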
diff --git a/docs/_newsfragments/1999.breakingchange.rst b/docs/_newsfragments/1999.breakingchange.rst new file mode 100644 index 000000000..6cac71257 --- /dev/null +++ b/docs/_newsfragments/1999.breakingchange.rst @@ -0,0 +1,6 @@ +The default value of the ``csv`` parameter in +:func:`~falcon.uri.parse_query_string` was changed to ``False``, matching the +default behavior of other parts of the framework (such as +:attr:`req.params `, the test client, etc). +If the old behavior fits your use case better, pass the ``csv=True`` keyword +argument explicitly. diff --git a/docs/_newsfragments/2051.bugfix.rst b/docs/_newsfragments/2051.bugfix.rst deleted file mode 100644 index 7f51c1097..000000000 --- a/docs/_newsfragments/2051.bugfix.rst +++ /dev/null @@ -1,4 +0,0 @@ -Some essential files were unintentionally omitted from the source distribution -archive, rendering it unsuitable to run the test suite off. -This has been fixed, and the ``sdist`` tarball should now be usable as a base -for packaging Falcon in OS distributions. diff --git a/docs/_newsfragments/2146.bugfix.rst b/docs/_newsfragments/2146.bugfix.rst deleted file mode 100644 index ab27e52ba..000000000 --- a/docs/_newsfragments/2146.bugfix.rst +++ /dev/null @@ -1,6 +0,0 @@ - -:ref:`WebSocket ` implementation has been fixed to properly handle -:class:`~falcon.HTTPError` and :class:`~falcon.HTTPStatus` exceptions raised by -custom :func:`error handlers `. -The WebSocket connection is now correctly closed with an appropriate code -instead of bubbling up an unhandled error to the application server. diff --git a/docs/_newsfragments/2157.bugfix.rst b/docs/_newsfragments/2157.bugfix.rst deleted file mode 100644 index f2a33e7f1..000000000 --- a/docs/_newsfragments/2157.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -:class:`~falcon.testing.TestCase` mimics behavior of real WSGI servers following WSGI spec where is said that ``PATH_INFO`` CGI variable for WSGI app is already percent-decoded. However, this breaks routing if slash (encoded as ``%2F``) is part of path element, not a path separator, as explained in the FAQ :ref:`routing_encoded_slashes`. The workaround based on some WSGI servers' non-standard CGI variables described in :ref:`raw_url_path_recipe` recipe breaks tests because :py:func:`falcon.testing.helpers.create_environ` hard-code CGI variable ``RAW_URI`` to ``/`` instead to real path *before* percent-decoding. \ No newline at end of file diff --git a/docs/api/cors.rst b/docs/api/cors.rst index 40bec453a..d19bf7a78 100644 --- a/docs/api/cors.rst +++ b/docs/api/cors.rst @@ -60,9 +60,15 @@ Usage allow_origins='example.com', allow_credentials='*')) .. note:: - Passing the ``cors_enable`` parameter set to ``True`` should be seen as - mutually exclusive with directly passing an instance of - :class:`~falcon.CORSMiddleware` to the application's initializer. + Passing the ``cors_enable`` parameter set to ``True`` is mutually exclusive + with directly passing an instance of :class:`~falcon.CORSMiddleware` to the + application's initializer. + + .. versionchanged:: 4.0 + + Attempting to use the combination of ``cors_enable=True`` and an additional + instance of :class:`~falcon.CORSMiddleware` now results in a + :class:`ValueError`. 
CORSMiddleware -------------- diff --git a/docs/changes/3.1.2.rst b/docs/changes/3.1.2.rst new file mode 100644 index 000000000..9720b9161 --- /dev/null +++ b/docs/changes/3.1.2.rst @@ -0,0 +1,54 @@ +Changelog for Falcon 3.1.2 +========================== + +Summary +------- + +This is a minor point release fixing a couple of high-impact bugs, +as well as publishing binary wheels for the recently released CPython 3.12. + + +Changes to Supported Platforms +------------------------------ + +- Falcon is now supported (including binary wheels) on CPython 3.12. + A couple of remaining stdlib deprecations from 3.11 and 3.12 will be + addressed in Falcon 4.0. +- As with the previous release, Python 3.5 & 3.6 remain deprecated and + will no longer be supported in Falcon 4.0. +- EOL Python 3.7 will no longer be actively supported in 4.0, but the framework + should still continue to install from source. We may remove support for + 3.7 altogether later in the 4.x series if we are faced with incompatible + ecosystem changes in typing, Cython, etc. + + +Fixed +----- + +- Some essential files were unintentionally omitted from the source distribution + archive, rendering it unsuitable for running the test suite. + This has been fixed, and the ``sdist`` tarball should now be usable as a base + for packaging Falcon in OS distributions. (`#2051 `__) +- :ref:`WebSocket ` implementation has been fixed to properly handle + :class:`~falcon.HTTPError` and :class:`~falcon.HTTPStatus` exceptions raised by + custom :func:`error handlers `. + The WebSocket connection is now correctly closed with an appropriate code + instead of bubbling up an unhandled error to the application server. (`#2146 `__) +- Falcon's :class:`~falcon.testing.TestClient` mimics the behavior of real WSGI + servers (and the WSGI spec) by presenting the ``PATH_INFO`` CGI variable + already in the percent-decoded form. However, the client also used to + indiscriminately set the non-standard ``RAW_URI`` CGI variable to ``/``, which + made writing tests for apps :ref:`decoding raw URL path ` + cumbersome. This has been fixed, and the raw path of a simulated request is now + preserved in ``RAW_URI``. (`#2157 `__) + + +Contributors to this Release +---------------------------- + +Many thanks to those who contributed to this bugfix release: + +- `CaselIT `__ +- `kgriffs `__ +- `liborjelinek `__ +- `vytas7 `__ diff --git a/docs/changes/3.1.3.rst b/docs/changes/3.1.3.rst new file mode 100644 index 000000000..416e77133 --- /dev/null +++ b/docs/changes/3.1.3.rst @@ -0,0 +1,11 @@ +Changelog for Falcon 3.1.3 +========================== + +Summary +------- + +This is a minor bugfix release that only pins the ``pytest-asyncio`` test +dependency in order to prevent an incompatible version from interfering with +the build workflow. + +This release is otherwise identical to :doc:`Falcon 3.1.2 <3.1.2>`. diff --git a/docs/changes/4.0.0.rst b/docs/changes/4.0.0.rst index 457343885..93359c678 100644 --- a/docs/changes/4.0.0.rst +++ b/docs/changes/4.0.0.rst @@ -13,7 +13,12 @@ Changes to Supported Platforms ------------------------------ - CPython 3.11 is now fully supported. (`#2072 `__) +- CPython 3.12 will be fully supported. (`#2196 `__) - End-of-life Python 3.5 & 3.6 are no longer supported. (`#2074 `__) +- Python 3.7 is no longer actively supported, but the framework should still + continue to install from source. We may remove support for 3.7 altogether + later in the 4.x series if we are faced with incompatible ecosystem changes + in typing, Cython, etc. 
.. towncrier release notes start diff --git a/docs/changes/index.rst b/docs/changes/index.rst index f122227ae..06371e311 100644 --- a/docs/changes/index.rst +++ b/docs/changes/index.rst @@ -4,6 +4,8 @@ Changelogs .. toctree:: 4.0.0 <4.0.0> + 3.1.3 <3.1.3> + 3.1.2 <3.1.2> 3.1.1 <3.1.1> 3.1.0 <3.1.0> 3.0.1 <3.0.1> diff --git a/docs/conf.py b/docs/conf.py index 3e2a2c05a..1b4b4ecb9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -82,13 +82,13 @@ # General information about the project. project = 'Falcon' -copyright = '{year} Falcon Contributors'.format(year=datetime.utcnow().year) +copyright = '{year} Falcon Contributors'.format(year=datetime.now().year) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -cfg = configparser.SafeConfigParser() +cfg = configparser.ConfigParser() cfg.read('../setup.cfg') tag = cfg.get('egg_info', 'tag_build') @@ -168,7 +168,7 @@ 'github_user': 'falconry', 'github_repo': 'falcon', 'github_button': False, - 'github_banner': not dash_build, + 'github_banner': False, 'fixed_sidebar': False, 'show_powered_by': False, 'extra_nav_links': OrderedDict( diff --git a/docs/ext/rfc.py b/docs/ext/rfc.py index 5288c43a3..608f22d96 100644 --- a/docs/ext/rfc.py +++ b/docs/ext/rfc.py @@ -23,7 +23,7 @@ import re -RFC_PATTERN = re.compile('RFC (\d{4}), Section ([\d\.]+)') +RFC_PATTERN = re.compile(r'RFC (\d{4}), Section ([\d\.]+)') def _render_section(section_number, rfc_number): diff --git a/examples/asgilook/tests/conftest.py b/examples/asgilook/tests/conftest.py index 565bb970c..420a256df 100644 --- a/examples/asgilook/tests/conftest.py +++ b/examples/asgilook/tests/conftest.py @@ -38,8 +38,15 @@ def storage_path(tmpdir_factory): @pytest.fixture def client(predictable_uuid, storage_path): + # NOTE(vytas): Unlike the sync FakeRedis, fakeredis.aioredis.FakeRedis + # seems to share a global state in 2.17.0 (by design or oversight). + # Make sure we initialize a new fake server for every test case. + def fake_redis_from_url(*args, **kwargs): + server = fakeredis.FakeServer() + return fakeredis.aioredis.FakeRedis(server=server) + config = Config() - config.redis_from_url = fakeredis.aioredis.FakeRedis.from_url + config.redis_from_url = fake_redis_from_url config.redis_host = 'redis://localhost' config.storage_path = storage_path config.uuid_generator = predictable_uuid diff --git a/falcon/app.py b/falcon/app.py index 442d41676..434dd6b39 100644 --- a/falcon/app.py +++ b/falcon/app.py @@ -20,6 +20,7 @@ import re import traceback from typing import Callable, Iterable, Optional, Tuple, Type, Union +import warnings from falcon import app_helpers as helpers from falcon import constants @@ -243,6 +244,7 @@ def __init__( cors_enable=False, sink_before_static_route=True, ): + self._cors_enable = cors_enable self._sink_before_static_route = sink_before_static_route self._sinks = [] self._static_routes = [] @@ -446,7 +448,7 @@ def __call__( # noqa: C901 def router_options(self): return self._router.options - def add_middleware(self, middleware: object) -> None: + def add_middleware(self, middleware: Union[object, Iterable]) -> None: """Add one or more additional middleware components. Arguments: @@ -460,10 +462,28 @@ def add_middleware(self, middleware: object) -> None: # the chance that middleware may be None. 
if middleware: try: - self._unprepared_middleware += middleware + middleware = list(middleware) # type: ignore except TypeError: # middleware is not iterable; assume it is just one bare component - self._unprepared_middleware.append(middleware) + middleware = [middleware] + + if ( + self._cors_enable + and len( + [ + mc + for mc in self._unprepared_middleware + middleware + if isinstance(mc, CORSMiddleware) + ] + ) + > 1 + ): + raise ValueError( + 'CORSMiddleware is not allowed in conjunction with ' + 'cors_enable (which already constructs one instance)' + ) + + self._unprepared_middleware += middleware # NOTE(kgriffs): Even if middleware is None or an empty list, we still # need to make sure self._middleware is initialized if this is the @@ -828,6 +848,12 @@ def handler(req, resp, ex, params): ('ex',), ('exception',), ) or arg_names[1:3] in (('req', 'resp'), ('request', 'response')): + warnings.warn( + f'handler is using a deprecated signature; please order its ' + f'arguments as {handler.__qualname__}(req, resp, ex, params). ' + f'This compatibility shim will be removed in Falcon 5.0.', + deprecation.DeprecatedWarning, + ) handler = wrap_old_handler(handler) exception_tuple: tuple diff --git a/falcon/asgi/ws.py b/falcon/asgi/ws.py index e7db3d5fa..db3700b2d 100644 --- a/falcon/asgi/ws.py +++ b/falcon/asgi/ws.py @@ -530,12 +530,14 @@ class WebSocketOptions: __slots__ = ['error_close_code', 'max_receive_queue', 'media_handlers'] - def __init__(self): + def __init__(self) -> None: try: import msgpack except ImportError: msgpack = None + bin_handler: media.BinaryBaseHandlerWS + if msgpack: bin_handler = media.MessagePackHandlerWS() else: diff --git a/falcon/cyutil/uri.pyx b/falcon/cyutil/uri.pyx index da3b2b296..477e7cc7e 100644 --- a/falcon/cyutil/uri.pyx +++ b/falcon/cyutil/uri.pyx @@ -251,7 +251,7 @@ cdef cy_parse_query_string(unsigned char* data, Py_ssize_t length, def parse_query_string(unicode query_string not None, bint keep_blank=False, - bint csv=True): + bint csv=False): cdef bytes byte_string = query_string.encode('utf-8') cdef unsigned char* data = byte_string return cy_parse_query_string(data, len(byte_string), keep_blank, csv) diff --git a/falcon/inspect.py b/falcon/inspect.py index d57d451af..62fc74c28 100644 --- a/falcon/inspect.py +++ b/falcon/inspect.py @@ -15,11 +15,11 @@ """Inspect utilities for falcon applications.""" from functools import partial import inspect -from typing import Callable -from typing import Dict +from typing import Callable # NOQA: F401 +from typing import Dict # NOQA: F401 from typing import List from typing import Optional -from typing import Type +from typing import Type # NOQA: F401 from falcon import app_helpers from falcon.app import App diff --git a/falcon/media/handlers.py b/falcon/media/handlers.py index bd3898d3c..0186e0aee 100644 --- a/falcon/media/handlers.py +++ b/falcon/media/handlers.py @@ -6,6 +6,7 @@ from falcon.constants import MEDIA_MULTIPART from falcon.constants import MEDIA_URLENCODED from falcon.constants import PYPY +from falcon.media.base import BinaryBaseHandlerWS from falcon.media.json import JSONHandler from falcon.media.multipart import MultipartFormHandler from falcon.media.multipart import MultipartParseOptions @@ -15,7 +16,7 @@ from falcon.vendor import mimeparse -class MissingDependencyHandler: +class MissingDependencyHandler(BinaryBaseHandlerWS): """Placeholder handler that always raises an error. 
This handler is used by the framework for media types that require an diff --git a/falcon/routing/compiled.py b/falcon/routing/compiled.py index 239467823..0b45edbc1 100644 --- a/falcon/routing/compiled.py +++ b/falcon/routing/compiled.py @@ -29,7 +29,7 @@ from falcon.util.sync import wrap_sync_to_async if TYPE_CHECKING: - from typing import Any + from typing import Any # NOQA: F401 _TAB_STR = ' ' * 4 _FIELD_PATTERN = re.compile( diff --git a/falcon/stream.py b/falcon/stream.py index d76a8b06d..c300ee98a 100644 --- a/falcon/stream.py +++ b/falcon/stream.py @@ -14,11 +14,17 @@ """WSGI BoundedStream class.""" +from __future__ import annotations + import io +from typing import BinaryIO, Callable, List, Optional, TypeVar, Union __all__ = ['BoundedStream'] +Result = TypeVar('Result', bound=Union[bytes, List[bytes]]) + + class BoundedStream(io.IOBase): """Wrap *wsgi.input* streams to make them more robust. @@ -45,21 +51,21 @@ class BoundedStream(io.IOBase): """ - def __init__(self, stream, stream_len): + def __init__(self, stream: BinaryIO, stream_len: int) -> None: self.stream = stream self.stream_len = stream_len self._bytes_remaining = self.stream_len - def __iter__(self): + def __iter__(self) -> BoundedStream: return self - def __next__(self): + def __next__(self) -> bytes: return next(self.stream) next = __next__ - def _read(self, size, target): + def _read(self, size: Optional[int], target: Callable[[int], Result]) -> Result: """Proxy reads to the underlying stream. Args: @@ -85,19 +91,19 @@ def _read(self, size, target): self._bytes_remaining -= size return target(size) - def readable(self): + def readable(self) -> bool: """Return ``True`` always.""" return True - def seekable(self): + def seekable(self) -> bool: """Return ``False`` always.""" return False - def writable(self): + def writable(self) -> bool: """Return ``False`` always.""" return False - def read(self, size=None): + def read(self, size: Optional[int] = None) -> bytes: """Read from the stream. Args: @@ -111,7 +117,7 @@ def read(self, size=None): return self._read(size, self.stream.read) - def readline(self, limit=None): + def readline(self, limit: Optional[int] = None) -> bytes: """Read a line from the stream. Args: @@ -125,7 +131,7 @@ def readline(self, limit=None): return self._read(limit, self.stream.readline) - def readlines(self, hint=None): + def readlines(self, hint: Optional[int] = None) -> List[bytes]: """Read lines from the stream. Args: @@ -139,12 +145,12 @@ def readlines(self, hint=None): return self._read(hint, self.stream.readlines) - def write(self, data): + def write(self, data: bytes) -> None: """Raise IOError always; writing is not supported.""" raise IOError('Stream is not writeable') - def exhaust(self, chunk_size=64 * 1024): + def exhaust(self, chunk_size: int = 64 * 1024) -> None: """Exhaust the stream. This consumes all the data left until the limit is reached. @@ -159,7 +165,7 @@ def exhaust(self, chunk_size=64 * 1024): break @property - def eof(self): + def eof(self) -> bool: return self._bytes_remaining <= 0 is_exhausted = eof diff --git a/falcon/testing/helpers.py b/falcon/testing/helpers.py index 3388ec3f0..00959495a 100644 --- a/falcon/testing/helpers.py +++ b/falcon/testing/helpers.py @@ -402,6 +402,8 @@ class ASGIWebSocketSimulator: ``None`` if the connection has not been accepted. 
""" + _DEFAULT_WAIT_READY_TIMEOUT = 5 + def __init__(self): self.__msgpack = None @@ -435,7 +437,7 @@ def subprotocol(self) -> str: def headers(self) -> Iterable[Iterable[bytes]]: return self._accepted_headers - async def wait_ready(self, timeout: Optional[int] = 5): + async def wait_ready(self, timeout: Optional[int] = None): """Wait until the connection has been accepted or denied. This coroutine can be awaited in order to pause execution until the @@ -447,16 +449,17 @@ async def wait_ready(self, timeout: Optional[int] = 5): raising an error (default: ``5``). """ + timeout = timeout or self._DEFAULT_WAIT_READY_TIMEOUT + try: await asyncio.wait_for(self._event_handshake_complete.wait(), timeout) except asyncio.TimeoutError: msg = ( - 'Timed out after waiting {} seconds for ' - 'the WebSocket handshake to complete. Check the ' - 'on_websocket responder and ' - 'any middleware for any conditions that may be stalling the ' - 'request flow.' - ).format(timeout) + f'Timed out after waiting {timeout} seconds for the WebSocket ' + f'handshake to complete. Check the on_websocket responder and ' + f'any middleware for any conditions that may be stalling the ' + f'request flow.' + ) raise asyncio.TimeoutError(msg) self._require_accepted() diff --git a/falcon/util/__init__.py b/falcon/util/__init__.py index 6de8d40d1..3fec8b06e 100644 --- a/falcon/util/__init__.py +++ b/falcon/util/__init__.py @@ -21,6 +21,7 @@ from http import cookies as http_cookies import sys +from types import ModuleType # Hoist misc. utils from falcon.constants import PYTHON_VERSION @@ -77,7 +78,7 @@ ) -def __getattr__(name): +def __getattr__(name: str) -> ModuleType: if name == 'json': import warnings import json # NOQA @@ -86,7 +87,6 @@ def __getattr__(name): 'Importing json from "falcon.util" is deprecated.', DeprecatedWarning ) return json - from types import ModuleType # fallback to the default implementation mod = sys.modules[__name__] diff --git a/falcon/util/deprecation.py b/falcon/util/deprecation.py index a0a62e25c..5e2607ad7 100644 --- a/falcon/util/deprecation.py +++ b/falcon/util/deprecation.py @@ -18,6 +18,9 @@ """ import functools +from typing import Any +from typing import Callable +from typing import Optional import warnings @@ -41,7 +44,9 @@ class DeprecatedWarning(UserWarning): pass -def deprecated(instructions, is_property=False, method_name=None): +def deprecated( + instructions: str, is_property: bool = False, method_name: Optional[str] = None +) -> Callable[[Callable[..., Any]], Any]: """Flag a method as deprecated. This function returns a decorator which can be used to mark deprecated @@ -60,7 +65,7 @@ def deprecated(instructions, is_property=False, method_name=None): """ - def decorator(func): + def decorator(func: Callable[..., Any]) -> Callable[[Callable[..., Any]], Any]: object_name = 'property' if is_property else 'function' post_name = '' if is_property else '(...)' @@ -69,7 +74,7 @@ def decorator(func): ) @functools.wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args: Any, **kwargs: Any) -> Callable[..., Any]: warnings.warn(message, category=DeprecatedWarning, stacklevel=2) return func(*args, **kwargs) @@ -79,7 +84,9 @@ def wrapper(*args, **kwargs): return decorator -def deprecated_args(*, allowed_positional, is_method=True): +def deprecated_args( + *, allowed_positional: int, is_method: bool = True +) -> Callable[..., Callable[..., Any]]: """Flag a method call with positional args as deprecated. 
Keyword Args: @@ -98,9 +105,9 @@ def deprecated_args(*, allowed_positional, is_method=True): if is_method: allowed_positional += 1 - def deprecated_args(fn): + def deprecated_args(fn: Callable[..., Any]) -> Callable[..., Callable[..., Any]]: @functools.wraps(fn) - def wraps(*args, **kwargs): + def wraps(*args: Any, **kwargs: Any) -> Callable[..., Any]: if len(args) > allowed_positional: warnings.warn( warn_text.format(fn=fn.__qualname__), diff --git a/falcon/util/misc.py b/falcon/util/misc.py index 1c05d1090..3690aeca4 100644 --- a/falcon/util/misc.py +++ b/falcon/util/misc.py @@ -22,12 +22,17 @@ now = falcon.http_now() """ - import datetime import functools import http import inspect import re +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Tuple +from typing import Union import unicodedata from falcon import status_codes @@ -69,18 +74,30 @@ _UNSAFE_CHARS = re.compile(r'[^a-zA-Z0-9.-]') # PERF(kgriffs): Avoid superfluous namespace lookups -strptime = datetime.datetime.strptime -utcnow = datetime.datetime.utcnow +_strptime: Callable[[str, str], datetime.datetime] = datetime.datetime.strptime +_utcnow: Callable[[], datetime.datetime] = functools.partial( + datetime.datetime.now, datetime.timezone.utc +) + +# The above aliases were not underscored prior to Falcon 3.1.2. +strptime: Callable[[str, str], datetime.datetime] = deprecated( + 'This was a private alias local to this module; ' + 'please reference datetime.strptime() directly.' +)(datetime.datetime.strptime) +utcnow: Callable[[], datetime.datetime] = deprecated( + 'This was a private alias local to this module; ' + 'please reference datetime.utcnow() directly.' +)(datetime.datetime.utcnow) # NOTE(kgriffs,vytas): This is tested in the PyPy gate but we do not want devs # to have to install PyPy to check coverage on their workstations, so we use # the nocover pragma here. -def _lru_cache_nop(*args, **kwargs): # pragma: nocover - def decorator(func): +def _lru_cache_nop(maxsize: int) -> Callable[[Callable], Callable]: # pragma: nocover + def decorator(func: Callable) -> Callable: # NOTE(kgriffs): Partially emulate the lru_cache protocol; only add # cache_info() later if/when it becomes necessary. - func.cache_clear = lambda: None + func.cache_clear = lambda: None # type: ignore return func @@ -95,7 +112,7 @@ def decorator(func): _lru_cache_for_simple_logic = functools.lru_cache # type: ignore -def is_python_func(func): +def is_python_func(func: Union[Callable, Any]) -> bool: """Determine if a function or method uses a standard Python type. This helper can be used to check a function or method to determine if it @@ -127,7 +144,7 @@ def http_now() -> str: e.g., 'Tue, 15 Nov 1994 12:45:26 GMT'. """ - return dt_to_http(utcnow()) + return dt_to_http(_utcnow()) def dt_to_http(dt: datetime.datetime) -> str: @@ -171,7 +188,7 @@ def http_date_to_dt(http_date: str, obs_date: bool = False) -> datetime.datetime # over it, and setting up exception handling blocks each # time around the loop, in the case that we don't actually # need to check for multiple formats. 
- return strptime(http_date, '%a, %d %b %Y %H:%M:%S %Z') + return _strptime(http_date, '%a, %d %b %Y %H:%M:%S %Z') time_formats = ( '%a, %d %b %Y %H:%M:%S %Z', @@ -183,7 +200,7 @@ def http_date_to_dt(http_date: str, obs_date: bool = False) -> datetime.datetime # Loop through the formats and return the first that matches for time_format in time_formats: try: - return strptime(http_date, time_format) + return _strptime(http_date, time_format) except ValueError: continue @@ -251,7 +268,7 @@ def to_query_str( return query_str[:-1] -def get_bound_method(obj, method_name): +def get_bound_method(obj: object, method_name: str) -> Union[None, Callable[..., Any]]: """Get a bound method of the given object by name. Args: @@ -278,7 +295,7 @@ def get_bound_method(obj, method_name): return method -def get_argnames(func): +def get_argnames(func: Callable) -> List[str]: """Introspect the arguments of a callable. Args: @@ -308,7 +325,9 @@ def get_argnames(func): @deprecated('Please use falcon.code_to_http_status() instead.') -def get_http_status(status_code, default_reason=_DEFAULT_HTTP_REASON): +def get_http_status( + status_code: Union[str, int], default_reason: str = _DEFAULT_HTTP_REASON +) -> str: """Get both the http status code and description from just a code. Warning: @@ -387,7 +406,7 @@ def secure_filename(filename: str) -> str: @_lru_cache_for_simple_logic(maxsize=64) -def http_status_to_code(status): +def http_status_to_code(status: Union[http.HTTPStatus, int, bytes, str]) -> int: """Normalize an HTTP status to an integer code. This function takes a member of :class:`http.HTTPStatus`, an HTTP status @@ -425,7 +444,7 @@ def http_status_to_code(status): @_lru_cache_for_simple_logic(maxsize=64) -def code_to_http_status(status): +def code_to_http_status(status: Union[int, http.HTTPStatus, bytes, str]) -> str: """Normalize an HTTP status to an HTTP status line string. This function takes a member of :class:`http.HTTPStatus`, an ``int`` status @@ -473,7 +492,7 @@ def code_to_http_status(status): return '{} {}'.format(code, _DEFAULT_HTTP_REASON) -def _encode_items_to_latin1(data): +def _encode_items_to_latin1(data: Dict[str, str]) -> List[Tuple[bytes, bytes]]: """Decode all key/values of a dict to Latin-1. Args: @@ -491,7 +510,7 @@ def _encode_items_to_latin1(data): return result -def _isascii(string: str): +def _isascii(string: str) -> bool: """Return ``True`` if all characters in the string are ASCII. ASCII characters have code points in the range U+0000-U+007F. diff --git a/falcon/util/reader.py b/falcon/util/reader.py index dca60d094..263d66716 100644 --- a/falcon/util/reader.py +++ b/falcon/util/reader.py @@ -13,9 +13,14 @@ # limitations under the License. 
"""Buffered stream reader.""" +from __future__ import annotations import functools import io +from typing import Callable +from typing import IO +from typing import List +from typing import Optional from falcon.errors import DelimiterError @@ -26,7 +31,12 @@ class BufferedReader: - def __init__(self, read, max_stream_len, chunk_size=None): + def __init__( + self, + read: Callable[[int], bytes], + max_stream_len: int, + chunk_size: Optional[int] = None, + ): self._read_func = read self._chunk_size = chunk_size or DEFAULT_CHUNK_SIZE self._max_join_size = self._chunk_size * _MAX_JOIN_CHUNKS @@ -36,7 +46,7 @@ def __init__(self, read, max_stream_len, chunk_size=None): self._buffer_pos = 0 self._max_bytes_remaining = max_stream_len - def _perform_read(self, size): + def _perform_read(self, size: int) -> bytes: # PERF(vytas): In Cython, bind types: # cdef bytes chunk # cdef Py_ssize_t chunk_len @@ -75,7 +85,7 @@ def _perform_read(self, size): self._max_bytes_remaining -= chunk_len result.write(chunk) - def _fill_buffer(self): + def _fill_buffer(self) -> None: # PERF(vytas): In Cython, bind types: # cdef Py_ssize_t read_size @@ -92,7 +102,7 @@ def _fill_buffer(self): self._buffer_len = len(self._buffer) - def peek(self, size=-1): + def peek(self, size: int = -1) -> bytes: if size < 0 or size > self._chunk_size: size = self._chunk_size @@ -101,7 +111,7 @@ def peek(self, size=-1): return self._buffer[self._buffer_pos : self._buffer_pos + size] - def _normalize_size(self, size): + def _normalize_size(self, size: Optional[int]) -> int: # PERF(vytas): In Cython, bind types: # cdef Py_ssize_t result # cdef Py_ssize_t max_size @@ -112,10 +122,10 @@ def _normalize_size(self, size): return max_size return size - def read(self, size=-1): + def read(self, size: int = -1) -> bytes: return self._read(self._normalize_size(size)) - def _read(self, size): + def _read(self, size: int) -> bytes: # PERF(vytas): In Cython, bind types: # cdef Py_ssize_t read_size # cdef bytes result @@ -150,7 +160,9 @@ def _read(self, size): self._buffer_pos = read_size return result + self._buffer[:read_size] - def read_until(self, delimiter, size=-1, consume_delimiter=False): + def read_until( + self, delimiter: bytes, size: int = -1, consume_delimiter: bool = False + ) -> bytes: # PERF(vytas): In Cython, bind types: # cdef Py_ssize_t read_size # cdef result @@ -168,15 +180,15 @@ def read_until(self, delimiter, size=-1, consume_delimiter=False): def _finalize_read_until( self, - size, - backlog, - have_bytes, - consume_bytes, - delimiter=None, - delimiter_pos=-1, - next_chunk=None, - next_chunk_len=0, - ): + size: int, + backlog: List[bytes], + have_bytes: int, + consume_bytes: int, + delimiter: Optional[bytes] = None, + delimiter_pos: int = -1, + next_chunk: Optional[bytes] = None, + next_chunk_len: int = 0, + ) -> bytes: if delimiter_pos < 0 and delimiter is not None: delimiter_pos = self._buffer.find(delimiter, self._buffer_pos) @@ -192,6 +204,7 @@ def _finalize_read_until( ret_value = b''.join(backlog) if next_chunk_len > 0: + assert next_chunk if self._buffer_len == 0: self._buffer = next_chunk self._buffer_len = next_chunk_len @@ -214,7 +227,9 @@ def _finalize_read_until( return ret_value - def _read_until(self, delimiter, size, consume_delimiter): + def _read_until( + self, delimiter: bytes, size: int, consume_delimiter: bool + ) -> bytes: # PERF(vytas): In Cython, bind types: # cdef list result = [] # cdef Py_ssize_t have_bytes = 0 @@ -223,7 +238,7 @@ def _read_until(self, delimiter, size, consume_delimiter): # cdef 
Py_ssize_t consume_bytes # cdef Py_ssize_t offset - result = [] + result: List[bytes] = [] have_bytes = 0 delimiter_len_1 = len(delimiter) - 1 delimiter_pos = -1 @@ -321,7 +336,7 @@ def _read_until(self, delimiter, size, consume_delimiter): self._buffer_pos = 0 self._buffer = next_chunk - def pipe(self, destination=None): + def pipe(self, destination: Optional[IO] = None) -> None: while True: chunk = self.read(self._chunk_size) if not chunk: @@ -331,8 +346,12 @@ def pipe(self, destination=None): destination.write(chunk) def pipe_until( - self, delimiter, destination=None, consume_delimiter=False, _size=None - ): + self, + delimiter: bytes, + destination: Optional[IO] = None, + consume_delimiter: bool = False, + _size: Optional[int] = None, + ) -> None: # PERF(vytas): In Cython, bind types: # cdef Py_ssize_t remaining @@ -354,14 +373,14 @@ def pipe_until( raise DelimiterError('expected delimiter missing') self._buffer_pos += delimiter_len - def exhaust(self): + def exhaust(self) -> None: self.pipe() - def delimit(self, delimiter): + def delimit(self, delimiter: bytes) -> BufferedReader: read = functools.partial(self.read_until, delimiter) return type(self)(read, self._normalize_size(None), self._chunk_size) - def readline(self, size=-1): + def readline(self, size: int = -1) -> bytes: size = self._normalize_size(size) result = self.read_until(b'\n', size) @@ -369,7 +388,7 @@ def readline(self, size=-1): return result + self.read(1) return result - def readlines(self, hint=-1): + def readlines(self, hint: int = -1) -> List[bytes]: # PERF(vytas): In Cython, bind types: # cdef Py_ssize_t read # cdef list result = [] @@ -391,14 +410,14 @@ def readlines(self, hint=-1): # --- implementing IOBase methods, the duck-typing way --- - def readable(self): + def readable(self) -> bool: """Return ``True`` always.""" return True - def seekable(self): + def seekable(self) -> bool: """Return ``False`` always.""" return False - def writeable(self): + def writeable(self) -> bool: """Return ``False`` always.""" return False diff --git a/falcon/util/structures.py b/falcon/util/structures.py index 13025cf4d..fc7ba2a88 100644 --- a/falcon/util/structures.py +++ b/falcon/util/structures.py @@ -25,9 +25,19 @@ things = falcon.CaseInsensitiveDict() """ +from __future__ import annotations from collections.abc import Mapping from collections.abc import MutableMapping +from typing import Any +from typing import Dict +from typing import ItemsView +from typing import Iterable +from typing import Iterator +from typing import KeysView +from typing import Optional +from typing import Tuple +from typing import ValuesView # TODO(kgriffs): If we ever diverge from what is upstream in Requests, @@ -61,34 +71,34 @@ class CaseInsensitiveDict(MutableMapping): # pragma: no cover """ - def __init__(self, data=None, **kwargs): - self._store = dict() + def __init__(self, data: Optional[Iterable[Tuple[str, Any]]] = None, **kwargs: Any): + self._store: Dict[str, Tuple[str, Any]] = dict() if data is None: data = {} self.update(data, **kwargs) - def __setitem__(self, key, value): + def __setitem__(self, key: str, value: Any) -> None: # Use the lowercased key for lookups, but store the actual # key alongside the value. 
self._store[key.lower()] = (key, value) - def __getitem__(self, key): + def __getitem__(self, key: str) -> Any: return self._store[key.lower()][1] - def __delitem__(self, key): + def __delitem__(self, key: str) -> None: del self._store[key.lower()] - def __iter__(self): + def __iter__(self) -> Iterator[str]: return (casedkey for casedkey, mappedvalue in self._store.values()) - def __len__(self): + def __len__(self) -> int: return len(self._store) - def lower_items(self): + def lower_items(self) -> Iterator[Tuple[str, Any]]: """Like iteritems(), but with all lowercase keys.""" return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items()) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: if isinstance(other, Mapping): other = CaseInsensitiveDict(other) else: @@ -97,10 +107,10 @@ def __eq__(self, other): return dict(self.lower_items()) == dict(other.lower_items()) # Copy is required - def copy(self): + def copy(self) -> CaseInsensitiveDict: return CaseInsensitiveDict(self._store.values()) - def __repr__(self): + def __repr__(self) -> str: return '%s(%r)' % (self.__class__.__name__, dict(self.items())) @@ -131,77 +141,80 @@ class Context: True """ - def __contains__(self, key): + def __contains__(self, key: str) -> bool: return self.__dict__.__contains__(key) - def __getitem__(self, key): + def __getitem__(self, key: str) -> Optional[Any]: # PERF(vytas): On CPython, using this mapping interface (instead of a # standard dict) to get, set and delete items incurs overhead # approximately comparable to that of two function calls # (per get/set/delete operation, that is). return self.__dict__.__getitem__(key) - def __setitem__(self, key, value): + def __setitem__(self, key: str, value: Any) -> None: return self.__dict__.__setitem__(key, value) - def __delitem__(self, key): + def __delitem__(self, key: str) -> None: self.__dict__.__delitem__(key) - def __iter__(self): + def __iter__(self) -> Iterator[str]: return self.__dict__.__iter__() - def __len__(self): + def __len__(self) -> int: return self.__dict__.__len__() - def __eq__(self, other): + def __eq__(self, other: object) -> bool: if isinstance(other, type(self)): return self.__dict__.__eq__(other.__dict__) return self.__dict__.__eq__(other) - def __ne__(self, other): + def __ne__(self, other: object) -> bool: if isinstance(other, type(self)): return self.__dict__.__ne__(other.__dict__) return self.__dict__.__ne__(other) - def __hash__(self): + def __hash__(self) -> int: return hash(self.__dict__) - def __repr__(self): + def __repr__(self) -> str: return '{}({})'.format(type(self).__name__, self.__dict__.__repr__()) - def __str__(self): + def __str__(self) -> str: return '{}({})'.format(type(self).__name__, self.__dict__.__str__()) - def clear(self): + def clear(self) -> None: return self.__dict__.clear() - def copy(self): + def copy(self) -> Context: ctx = type(self)() ctx.update(self.__dict__) return ctx - def get(self, key, default=None): + def get(self, key: str, default: Optional[Any] = None) -> Optional[Any]: return self.__dict__.get(key, default) - def items(self): + def items(self) -> ItemsView[str, Any]: return self.__dict__.items() - def keys(self): + def keys(self) -> KeysView[str]: return self.__dict__.keys() - def pop(self, key, default=None): + def pop(self, key: str, default: Optional[Any] = None) -> Optional[Any]: return self.__dict__.pop(key, default) - def popitem(self): + def popitem(self) -> Tuple[str, Any]: + return self.__dict__.popitem() - def setdefault(self, key, default_value=None): + 
def setdefault( + self, key: str, default_value: Optional[Any] = None + ) -> Optional[Any]: return self.__dict__.setdefault(key, default_value) - def update(self, items): + def update(self, items: dict[str, Any]) -> None: self.__dict__.update(items) - def values(self): + def values(self) -> ValuesView: return self.__dict__.values() @@ -243,7 +256,7 @@ def on_get(self, req, resp): is_weak = False - def strong_compare(self, other): + def strong_compare(self, other: ETag) -> bool: """Perform a strong entity-tag comparison. Two entity-tags are equivalent if both are not weak and their @@ -262,7 +275,7 @@ def strong_compare(self, other): return self == other and not (self.is_weak or other.is_weak) - def dumps(self): + def dumps(self) -> str: """Serialize the ETag to a string suitable for use in a precondition header. (See also: RFC 7232, Section 2.3) @@ -280,7 +293,7 @@ def dumps(self): return '"' + self + '"' @classmethod - def loads(cls, etag_str): + def loads(cls, etag_str: str) -> ETag: """Deserialize a single entity-tag string from a precondition header. Note: diff --git a/falcon/util/sync.py b/falcon/util/sync.py index 7b6107f19..db9e21e37 100644 --- a/falcon/util/sync.py +++ b/falcon/util/sync.py @@ -4,7 +4,12 @@ from functools import wraps import inspect import os +from typing import Any +from typing import Awaitable from typing import Callable +from typing import Optional +from typing import TypeVar +from typing import Union __all__ = [ @@ -17,14 +22,13 @@ 'wrap_sync_to_async_unsafe', ] - _one_thread_to_rule_them_all = ThreadPoolExecutor(max_workers=1) create_task = asyncio.create_task get_running_loop = asyncio.get_running_loop -def wrap_sync_to_async_unsafe(func) -> Callable: +def wrap_sync_to_async_unsafe(func: Callable[..., Any]) -> Callable[..., Any]: """Wrap a callable in a coroutine that executes the callable directly. This helper makes it easier to use synchronous callables with ASGI @@ -48,13 +52,15 @@ def wrap_sync_to_async_unsafe(func) -> Callable: """ @wraps(func) - async def wrapper(*args, **kwargs): + async def wrapper(*args: Any, **kwargs: Any) -> Callable[..., Any]: return func(*args, **kwargs) return wrapper -def wrap_sync_to_async(func, threadsafe=None) -> Callable: +def wrap_sync_to_async( + func: Callable[..., Any], threadsafe: Optional[bool] = None +) -> Callable[..., Any]: """Wrap a callable in a coroutine that executes the callable in the background. This helper makes it easier to call functions that can not be @@ -94,7 +100,7 @@ def wrap_sync_to_async(func, threadsafe=None) -> Callable: executor = _one_thread_to_rule_them_all @wraps(func) - async def wrapper(*args, **kwargs): + async def wrapper(*args: Any, **kwargs: Any) -> Any: return await get_running_loop().run_in_executor( executor, partial(func, *args, **kwargs) ) @@ -102,7 +108,9 @@ async def wrapper(*args, **kwargs): return wrapper -async def sync_to_async(func, *args, **kwargs): +async def sync_to_async( + func: Callable[..., Any], *args: Any, **kwargs: Any +) -> Callable[..., Awaitable[Any]]: """Schedule a synchronous callable on the default executor and await the result. 
This helper makes it easier to call functions that can not be @@ -153,7 +161,9 @@ def _should_wrap_non_coroutines() -> bool: return 'FALCON_ASGI_WRAP_NON_COROUTINES' in os.environ -def _wrap_non_coroutine_unsafe(func): +def _wrap_non_coroutine_unsafe( + func: Optional[Callable[..., Any]] +) -> Union[Callable[..., Awaitable[Any]], Callable[..., Any], None]: """Wrap a coroutine using ``wrap_sync_to_async_unsafe()`` for internal test cases. This method is intended for Falcon's own test suite and should not be @@ -180,7 +190,12 @@ def _wrap_non_coroutine_unsafe(func): return wrap_sync_to_async_unsafe(func) -def async_to_sync(coroutine, *args, **kwargs): +Result = TypeVar('Result') + + +def async_to_sync( + coroutine: Callable[..., Awaitable[Result]], *args: Any, **kwargs: Any +) -> Result: """Invoke a coroutine function from a synchronous caller. This method can be used to invoke an asynchronous task from a synchronous @@ -212,7 +227,7 @@ def async_to_sync(coroutine, *args, **kwargs): return loop.run_until_complete(coroutine(*args, **kwargs)) -def runs_sync(coroutine): +def runs_sync(coroutine: Callable[..., Awaitable[Result]]) -> Callable[..., Result]: """Transform a coroutine function into a synchronous method. This is achieved by always invoking the decorated coroutine function via @@ -234,7 +249,7 @@ def runs_sync(coroutine): """ @wraps(coroutine) - def invoke(*args, **kwargs): + def invoke(*args: Any, **kwargs: Any) -> Any: return async_to_sync(coroutine, *args, **kwargs) return invoke diff --git a/falcon/util/uri.py b/falcon/util/uri.py index e6078dfb9..5daa7c68a 100644 --- a/falcon/util/uri.py +++ b/falcon/util/uri.py @@ -22,8 +22,12 @@ name, port = uri.parse_host('example.org:8080') """ - +from typing import Callable +from typing import Dict +from typing import List +from typing import Optional from typing import Tuple, TYPE_CHECKING +from typing import Union from falcon.constants import PYPY @@ -52,7 +56,7 @@ } -def _create_char_encoder(allowed_chars): +def _create_char_encoder(allowed_chars: str) -> Callable[[int], str]: lookup = {} @@ -67,13 +71,15 @@ def _create_char_encoder(allowed_chars): return lookup.__getitem__ -def _create_str_encoder(is_value, check_is_escaped=False): +def _create_str_encoder( + is_value: bool, check_is_escaped: bool = False +) -> Callable[[str], str]: allowed_chars = _UNRESERVED if is_value else _ALL_ALLOWED allowed_chars_plus_percent = allowed_chars + '%' encode_char = _create_char_encoder(allowed_chars) - def encoder(uri): + def encoder(uri: str) -> str: # PERF(kgriffs): Very fast way to check, learned from urlib.quote if not uri.rstrip(allowed_chars): return uri @@ -107,7 +113,7 @@ def encoder(uri): # partially encoded, the caller will need to normalize it # before passing it in here. - uri = uri.encode() + encoded_uri = uri.encode() # Use our map to encode each char and join the result into a new uri # @@ -115,7 +121,7 @@ def encoder(uri): # CPython 3 (tested on CPython 3.5 and 3.7). A list comprehension # can be faster on PyPy3, but the difference is on the order of # nanoseconds in that case, so we aren't going to worry about it. - return ''.join(map(encode_char, uri)) + return ''.join(map(encode_char, encoded_uri)) return encoder @@ -143,7 +149,7 @@ def encoder(uri): """ encode_value = _create_str_encoder(True) -encode_value.name = 'encode_value' +encode_value.__name__ = 'encode_value' encode_value.__doc__ = """Encodes a value string according to RFC 3986. 
Disallowed characters are percent-encoded in a way that models @@ -171,7 +177,7 @@ def encoder(uri): """ encode_check_escaped = _create_str_encoder(False, True) -encode_check_escaped.name = 'encode_check_escaped' +encode_check_escaped.__name__ = 'encode_check_escaped' encode_check_escaped.__doc__ = """Encodes a full or relative URI according to RFC 3986. RFC 3986 defines a set of "unreserved" characters as well as a @@ -195,7 +201,7 @@ def encoder(uri): """ encode_value_check_escaped = _create_str_encoder(True, True) -encode_value_check_escaped.name = 'encode_value_check_escaped' +encode_value_check_escaped.__name__ = 'encode_value_check_escaped' encode_value_check_escaped.__doc__ = """Encodes a value string according to RFC 3986. RFC 3986 defines a set of "unreserved" characters as well as a @@ -224,7 +230,7 @@ def encoder(uri): """ -def _join_tokens_bytearray(tokens): +def _join_tokens_bytearray(tokens: List[bytes]) -> str: decoded_uri = bytearray(tokens[0]) for token in tokens[1:]: token_partial = token[:2] @@ -238,7 +244,7 @@ def _join_tokens_bytearray(tokens): return decoded_uri.decode('utf-8', 'replace') -def _join_tokens_list(tokens): +def _join_tokens_list(tokens: List[bytes]) -> str: decoded = tokens[:1] # PERF(vytas): Do not copy list: a simple bool flag is fastest on PyPy JIT. skip = True @@ -270,7 +276,7 @@ def _join_tokens_list(tokens): _join_tokens = _join_tokens_list if PYPY else _join_tokens_bytearray -def decode(encoded_uri, unquote_plus=True): +def decode(encoded_uri: str, unquote_plus: bool = True) -> str: """Decode percent-encoded characters in a URI or query string. This function models the behavior of `urllib.parse.unquote_plus`, @@ -306,36 +312,38 @@ def decode(encoded_uri, unquote_plus=True): # NOTE(kgriffs): Clients should never submit a URI that has # unescaped non-ASCII chars in them, but just in case they # do, let's encode into a non-lossy format. - decoded_uri = decoded_uri.encode() + reencoded_uri = decoded_uri.encode() # PERF(kgriffs): This was found to be faster than using # a regex sub call or list comprehension with a join. - tokens = decoded_uri.split(b'%') + tokens = reencoded_uri.split(b'%') # PERF(vytas): Just use in-place add for a low number of items: if len(tokens) < 8: - decoded_uri = tokens[0] + reencoded_uri = tokens[0] for token in tokens[1:]: token_partial = token[:2] try: - decoded_uri += _HEX_TO_BYTE[token_partial] + token[2:] + reencoded_uri += _HEX_TO_BYTE[token_partial] + token[2:] except KeyError: # malformed percentage like "x=%" or "y=%+" - decoded_uri += b'%' + token + reencoded_uri += b'%' + token # Convert back to str - return decoded_uri.decode('utf-8', 'replace') + return reencoded_uri.decode('utf-8', 'replace') # NOTE(vytas): Decode percent-encoded bytestring fragments and join them # back to a string using the platform-dependent method. return _join_tokens(tokens) -def parse_query_string(query_string: str, keep_blank: bool = False, csv: bool = True): +def parse_query_string( + query_string: str, keep_blank: bool = False, csv: bool = False +) -> Dict[str, Union[str, List[str]]]: """Parse a query string into a dict. Query string parameters are assumed to use standard form-encoding. Only parameters with values are returned. For example, given 'foo=bar&flag', - this function would ignore 'flag' unless the `keep_blank_qs_values` option + this function would ignore 'flag' unless the `keep_blank` option is set. 
Note: @@ -343,6 +351,8 @@ def parse_query_string(query_string: str, keep_blank: bool = False, csv: bool = lists by repeating a given param multiple times, Falcon supports a more compact form in which the param may be given a single time but set to a ``list`` of comma-separated elements (e.g., 'foo=a,b,c'). + This comma-separated format can be enabled by setting the `csv` + option (see below) to ``True``. When using this format, all commas uri-encoded will not be treated by Falcon as a delimiter. If the client wants to send a value as a list, @@ -357,12 +367,13 @@ def parse_query_string(query_string: str, keep_blank: bool = False, csv: bool = they do not have a value (default ``False``). For comma-separated values, this option also determines whether or not empty elements in the parsed list are retained. - csv: Set to ``False`` in order to disable splitting query - parameters on ``,`` (default ``True``). Depending on the user agent, - encoding lists as multiple occurrences of the same parameter might - be preferable. In this case, setting `parse_qs_csv` to ``False`` - will cause the framework to treat commas as literal characters in - each occurring parameter value. + csv: Set to ``True`` in order to enable splitting query + parameters on ``,`` (default ``False``). + Depending on the user agent, encoding lists as multiple occurrences + of the same parameter might be preferable. In this case, keeping + `parse_qs_csv` at its default value (``False``) will cause the + framework to treat commas as literal characters in each occurring + parameter value. Returns: dict: A dictionary of (*name*, *value*) pairs, one per query @@ -455,7 +466,9 @@ def parse_query_string(query_string: str, keep_blank: bool = False, csv: bool = return params -def parse_host(host: str, default_port=None) -> Tuple[str, int]: +def parse_host( + host: str, default_port: Optional[int] = None +) -> Tuple[str, Optional[int]]: """Parse a canonical 'host:port' string into parts. Parse a host string (which may or may not contain a port) into @@ -506,7 +519,7 @@ def parse_host(host: str, default_port=None) -> Tuple[str, int]: return (name, int(port)) -def unquote_string(quoted): +def unquote_string(quoted: str) -> str: """Unquote an RFC 7320 "quoted-string". Args: diff --git a/pyproject.toml b/pyproject.toml index b386f7a2f..ad445ce55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,6 @@ [tool.mypy] exclude = "falcon/bench/|falcon/cmd/" - [[tool.mypy.overrides]] module = [ "cbor2", @@ -34,6 +33,13 @@ ] ignore_missing_imports = true + [[tool.mypy.overrides]] + module = [ + "falcon.stream", + "falcon.util.*" + ] + disallow_untyped_defs = true + [tool.towncrier] package = "falcon" package_dir = "" @@ -90,6 +96,8 @@ filterwarnings = [ "ignore:inspect.getargspec\\(\\) is deprecated:DeprecationWarning", "ignore:.cgi. is deprecated and slated for removal:DeprecationWarning", "ignore:path is deprecated\\. Use files\\(\\) instead:DeprecationWarning", + "ignore:This process \\(.+\\) is multi-threaded", + "ignore:There is no current event loop", ] testpaths = [ "tests" diff --git a/requirements/tests b/requirements/tests index 187bb054e..19b34bcd3 100644 --- a/requirements/tests +++ b/requirements/tests @@ -7,7 +7,8 @@ requests testtools; python_version < '3.10' # ASGI Specific (daphne is installed on a its own tox env) -pytest-asyncio +# TODO(vytas): Some ASGI tests hang with pytest-asyncio-0.23 on 3.8 & 3.9. 
+pytest-asyncio < 0.22.0 aiofiles httpx uvicorn >= 0.17.0 @@ -24,3 +25,6 @@ python-rapidjson; platform_machine != 's390x' and platform_machine != 'aarch64' # wheels are missing some EoL interpreters and non-x86 platforms; build would fail unless rust is available orjson; platform_python_implementation != 'PyPy' and platform_machine != 's390x' and platform_machine != 'aarch64' + +# Images for 3.7 on emulated architectures seem to only have OpenSSL 1.0.2 +urllib3 < 2.0; python_version <= '3.7' diff --git a/setup.cfg b/setup.cfg index 15de77818..e7c308429 100644 --- a/setup.cfg +++ b/setup.cfg @@ -29,6 +29,7 @@ classifiers = Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 Programming Language :: Cython keywords = asgi diff --git a/tests/_util.py b/tests/_util.py index d44cfd4ad..85070a550 100644 --- a/tests/_util.py +++ b/tests/_util.py @@ -1,6 +1,8 @@ from contextlib import contextmanager import os +import pytest + import falcon import falcon.asgi import falcon.testing @@ -65,3 +67,13 @@ def disable_asgi_non_coroutine_wrapping(): if should_wrap: os.environ['FALCON_ASGI_WRAP_NON_COROUTINES'] = 'Y' + + +def as_params(*values, prefix=None): + if not prefix: + prefix = '' + # NOTE(caselit): each value must be a tuple/list even when using one single argument + return [ + pytest.param(*value, id=f'{prefix}_{i}' if prefix else f'{i}') + for i, value in enumerate(values, 1) + ] diff --git a/tests/asgi/test_ws.py b/tests/asgi/test_ws.py index c96ed3e0f..f45ea9758 100644 --- a/tests/asgi/test_ws.py +++ b/tests/asgi/test_ws.py @@ -10,6 +10,7 @@ from falcon import media, testing from falcon.asgi import App from falcon.asgi.ws import _WebSocketState as ServerWebSocketState +from falcon.asgi.ws import WebSocket from falcon.asgi.ws import WebSocketOptions from falcon.testing.helpers import _WebSocketState as ClientWebSocketState @@ -370,6 +371,13 @@ async def on_websocket(self, req, ws): await resource.data_received.wait() assert resource.data == sample_data + # NOTE(vytas): When testing the case where the server + # explicitly closes the connection, try to receive some data + # before closing from the client side (and potentially + # winning the async race of which side closes first). + if explicit_close_server: + await ws.receive_data() + if explicit_close_client: await ws.close(4042) @@ -1081,6 +1089,34 @@ class Resource: event = await ws._emit() +@pytest.mark.asyncio +async def test_ws_responder_never_ready(conductor, monkeypatch): + async def noop_close(obj, code=None): + pass + + class SleepyResource: + async def on_websocket(self, req, ws): + for i in range(10): + await asyncio.sleep(0.001) + + conductor.app.add_route('/', SleepyResource()) + + # NOTE(vytas): It seems that it is hard to impossible to hit the second + # `await ready_waiter` of the _WSContextManager on CPython 3.12 due to + # different async code optimizations, so we mock away WebSocket.close. + monkeypatch.setattr(WebSocket, 'close', noop_close) + + # NOTE(vytas): Shorten the timeout so that we do not wait for 5 seconds. 
+ monkeypatch.setattr( + testing.ASGIWebSocketSimulator, '_DEFAULT_WAIT_READY_TIMEOUT', 0.5 + ) + + async with conductor as c: + with pytest.raises(asyncio.TimeoutError): + async with c.simulate_ws(): + pass + + @pytest.mark.skipif(msgpack, reason='test requires msgpack lib to be missing') def test_msgpack_missing(): @@ -1111,6 +1147,9 @@ async def on_websocket(self, req, ws): async with conductor as c: if accept: async with c.simulate_ws() as ws: + # Make sure the responder has a chance to reach the raise point + for _ in range(3): + await asyncio.sleep(0) assert ws.closed assert ws.close_code == exp_code else: @@ -1208,6 +1247,9 @@ async def handle_foobar(req, resp, ex, param): # type: ignore async with conductor as c: if place == 'ws_after_accept': async with c.simulate_ws() as ws: + # Make sure the responder has a chance to reach the raise point + for _ in range(3): + await asyncio.sleep(0) assert ws.closed assert ws.close_code == exp_code else: diff --git a/tests/test_cookies.py b/tests/test_cookies.py index 72759f0e5..1d2e0c847 100644 --- a/tests/test_cookies.py +++ b/tests/test_cookies.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta, tzinfo +from datetime import datetime, timedelta, timezone, tzinfo from http import cookies as http_cookies import re @@ -28,6 +28,10 @@ def dst(self, dt): GMT_PLUS_ONE = TimezoneGMTPlus1() +def utcnow_naive(): + return datetime.now(timezone.utc).replace(tzinfo=None) + + class CookieResource: def on_get(self, req, resp): resp.set_cookie('foo', 'bar', domain='example.com', path='/') @@ -171,7 +175,7 @@ def test_response_complex_case(client): assert cookie.domain is None assert cookie.same_site == 'Lax' - assert cookie.expires < datetime.utcnow() + assert cookie.expires < utcnow_naive() # NOTE(kgriffs): I know accessing a private attr like this is # naughty of me, but we just need to sanity-check that the @@ -193,7 +197,7 @@ def test(cookie, path, domain, samesite='Lax'): assert cookie.domain == domain assert cookie.path == path assert cookie.same_site == samesite - assert cookie.expires < datetime.utcnow() + assert cookie.expires < utcnow_naive() test(result.cookies['foo'], path=None, domain=None) test(result.cookies['bar'], path='/bar', domain=None) @@ -231,7 +235,7 @@ def test_set(cookie, value, samesite=None): def test_unset(cookie, samesite='Lax'): assert cookie.value == '' # An unset cookie has an empty value assert cookie.same_site == samesite - assert cookie.expires < datetime.utcnow() + assert cookie.expires < utcnow_naive() test_unset(result_unset.cookies['foo'], samesite='Strict') # default: bar is unset with no samesite param, so should go to Lax @@ -325,7 +329,7 @@ def test_response_unset_cookie(client): assert match expiration = http_date_to_dt(match.group(1), obs_date=True) - assert expiration < datetime.utcnow() + assert expiration < utcnow_naive() def test_cookie_timezone(client): diff --git a/tests/test_error_handlers.py b/tests/test_error_handlers.py index c43589dbb..06282bdc7 100644 --- a/tests/test_error_handlers.py +++ b/tests/test_error_handlers.py @@ -3,6 +3,7 @@ import falcon from falcon import constants, testing import falcon.asgi +from falcon.util.deprecation import DeprecatedWarning from _util import create_app, disable_asgi_non_coroutine_wrapping # NOQA @@ -212,9 +213,12 @@ def legacy_handler3(err, rq, rs, prms): app.add_route('/', ErroredClassResource()) client = testing.TestClient(app) - client.app.add_error_handler(Exception, legacy_handler1) - client.app.add_error_handler(CustomBaseException, legacy_handler2) 
- client.app.add_error_handler(CustomException, legacy_handler3) + with pytest.warns(DeprecatedWarning, match='deprecated signature'): + client.app.add_error_handler(Exception, legacy_handler1) + with pytest.warns(DeprecatedWarning, match='deprecated signature'): + client.app.add_error_handler(CustomBaseException, legacy_handler2) + with pytest.warns(DeprecatedWarning, match='deprecated signature'): + client.app.add_error_handler(CustomException, legacy_handler3) client.simulate_delete() client.simulate_get() diff --git a/tests/test_headers.py b/tests/test_headers.py index 63c5170fb..bf9d47536 100644 --- a/tests/test_headers.py +++ b/tests/test_headers.py @@ -6,6 +6,8 @@ import falcon from falcon import testing from falcon.util.deprecation import DeprecatedWarning +from falcon.util.misc import _utcnow + from _util import create_app # NOQA @@ -31,7 +33,7 @@ def __init__(self, last_modified=None): if last_modified is not None: self.last_modified = last_modified else: - self.last_modified = datetime.utcnow() + self.last_modified = _utcnow() def _overwrite_headers(self, req, resp): resp.content_type = 'x-falcon/peregrine' diff --git a/tests/test_middleware.py b/tests/test_middleware.py index 5b794c7aa..b6b1b9d30 100644 --- a/tests/test_middleware.py +++ b/tests/test_middleware.py @@ -1,4 +1,3 @@ -from datetime import datetime import json try: @@ -10,6 +9,8 @@ import falcon import falcon.errors import falcon.testing as testing +from falcon.util.deprecation import DeprecatedWarning +from falcon.util.misc import _utcnow from _util import create_app # NOQA @@ -36,15 +37,15 @@ def process_request(self, req, resp): class RequestTimeMiddleware: def process_request(self, req, resp): global context - context['start_time'] = datetime.utcnow() + context['start_time'] = _utcnow() def process_resource(self, req, resp, resource, params): global context - context['mid_time'] = datetime.utcnow() + context['mid_time'] = _utcnow() def process_response(self, req, resp, resource, req_succeeded): global context - context['end_time'] = datetime.utcnow() + context['end_time'] = _utcnow() context['req_succeeded'] = req_succeeded async def process_request_async(self, req, resp): @@ -900,14 +901,19 @@ def test_http_status_raised_from_error_handler(self, asgi): def _http_error_handler(error, req, resp, params): raise falcon.HTTPStatus(falcon.HTTP_201) - async def _http_error_handler_async(error, req, resp, params): + async def _http_error_handler_async(req, resp, error, params): raise falcon.HTTPStatus(falcon.HTTP_201) h = _http_error_handler_async if asgi else _http_error_handler # NOTE(kgriffs): This will take precedence over the default # handler for facon.HTTPError. - app.add_error_handler(falcon.HTTPError, h) + if asgi: + # NOTE(vytas): The ASGI flavour supports no reordering shim. 
+ app.add_error_handler(falcon.HTTPError, h) + else: + with pytest.warns(DeprecatedWarning, match='deprecated signature'): + app.add_error_handler(falcon.HTTPError, h) response = client.simulate_request(path='/', method='POST') assert response.status == falcon.HTTP_201 @@ -994,20 +1000,28 @@ def test_process_resource_cached(self, asgi, independent_middleware): class TestCORSMiddlewareWithAnotherMiddleware(TestMiddleware): @pytest.mark.parametrize( - 'mw', + 'mw,allowed', [ - CaptureResponseMiddleware(), - [CaptureResponseMiddleware()], - (CaptureResponseMiddleware(),), - iter([CaptureResponseMiddleware()]), + (CaptureResponseMiddleware(), True), + ([CaptureResponseMiddleware()], True), + ((CaptureResponseMiddleware(),), True), + (iter([CaptureResponseMiddleware()]), True), + (falcon.CORSMiddleware(), False), + ([falcon.CORSMiddleware()], False), ], ) - def test_api_initialization_with_cors_enabled_and_middleware_param(self, mw, asgi): - app = create_app(asgi, middleware=mw, cors_enable=True) - app.add_route('/', TestCorsResource()) - client = testing.TestClient(app) - result = client.simulate_get(headers={'Origin': 'localhost'}) - assert result.headers['Access-Control-Allow-Origin'] == '*' + def test_api_initialization_with_cors_enabled_and_middleware_param( + self, mw, asgi, allowed + ): + if allowed: + app = create_app(asgi, middleware=mw, cors_enable=True) + app.add_route('/', TestCorsResource()) + client = testing.TestClient(app) + result = client.simulate_get(headers={'Origin': 'localhost'}) + assert result.headers['Access-Control-Allow-Origin'] == '*' + else: + with pytest.raises(ValueError, match='CORSMiddleware'): + app = create_app(asgi, middleware=mw, cors_enable=True) @pytest.mark.skipif(cython, reason='Cythonized coroutine functions cannot be detected') diff --git a/tests/test_request_attrs.py b/tests/test_request_attrs.py index ba0f7e293..466a0c875 100644 --- a/tests/test_request_attrs.py +++ b/tests/test_request_attrs.py @@ -110,7 +110,7 @@ def test_subdomain(self, asgi): # NOTE(kgriffs): Behavior for IP addresses is undefined, # so just make sure it doesn't blow up. 
req = create_req(asgi, host='127.0.0.1', path='/hello', headers=self.headers) - assert type(req.subdomain) == str + assert type(req.subdomain) is str # NOTE(kgriffs): Test fallback to SERVER_NAME by using # HTTP 1.0, which will cause .create_environ to not set diff --git a/tests/test_request_media.py b/tests/test_request_media.py index 31305b5c4..23654cc27 100644 --- a/tests/test_request_media.py +++ b/tests/test_request_media.py @@ -184,7 +184,7 @@ def test_invalid_json(asgi): try: json.loads(expected_body) except Exception as e: - assert type(client.resource.captured_error.value.__cause__) == type(e) + assert type(client.resource.captured_error.value.__cause__) is type(e) assert str(client.resource.captured_error.value.__cause__) == str(e) @@ -210,7 +210,7 @@ def test_invalid_msgpack(asgi): try: msgpack.unpackb(expected_body.encode('utf-8')) except Exception as e: - assert type(client.resource.captured_error.value.__cause__) == type(e) + assert type(client.resource.captured_error.value.__cause__) is type(e) assert str(client.resource.captured_error.value.__cause__) == str(e) diff --git a/tests/test_uri_converters.py b/tests/test_uri_converters.py index 1a5c57919..7efc39fbf 100644 --- a/tests/test_uri_converters.py +++ b/tests/test_uri_converters.py @@ -3,6 +3,7 @@ import string import uuid +from _util import as_params import pytest from falcon.routing import converters @@ -150,7 +151,7 @@ def test_datetime_converter_default_format(): @pytest.mark.parametrize( 'value, expected', - [ + as_params( (_TEST_UUID_STR, _TEST_UUID), (_TEST_UUID_STR.replace('-', '', 1), _TEST_UUID), (_TEST_UUID_STR_SANS_HYPHENS, _TEST_UUID), @@ -163,7 +164,8 @@ def test_datetime_converter_default_format(): (_TEST_UUID_STR[0], None), (_TEST_UUID_STR[:-1] + 'g', None), (_TEST_UUID_STR.replace('-', '_'), None), - ], + prefix='uuid', + ), ) def test_uuid_converter(value, expected): c = converters.UUIDConverter() diff --git a/tests/test_uri_templates.py b/tests/test_uri_templates.py index 7f6c72b32..448209a5a 100644 --- a/tests/test_uri_templates.py +++ b/tests/test_uri_templates.py @@ -15,7 +15,7 @@ from falcon import testing from falcon.routing.util import SuffixedMethodNotFoundError -from _util import create_app # NOQA +from _util import as_params, create_app # NOQA _TEST_UUID = uuid.uuid4() @@ -314,7 +314,7 @@ def test_datetime_converter(client, resource, uri_template, path, dt_expected): @pytest.mark.parametrize( 'uri_template, path, expected', - [ + as_params( ( '/widgets/{widget_id:uuid}', '/widgets/' + _TEST_UUID_STR, @@ -354,7 +354,8 @@ def test_datetime_converter(client, resource, uri_template, path, dt_expected): '/widgets/' + _TEST_UUID_STR_SANS_HYPHENS[:-1] + '/orders', None, ), - ], + prefix='uuid_converter', + ), ) def test_uuid_converter(client, resource, uri_template, path, expected): client.app.add_route(uri_template, resource) diff --git a/tests/test_utils.py b/tests/test_utils.py index 83a0e1f22..349032856 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from datetime import datetime +from datetime import datetime, timezone import functools import http import itertools @@ -109,13 +109,12 @@ def old_thing(): assert msg in str(warn.message) def test_http_now(self): - expected = datetime.utcnow() + expected = datetime.now(timezone.utc) actual = falcon.http_date_to_dt(falcon.http_now()) - delta = actual - expected - delta_sec = abs(delta.days * 86400 + delta.seconds) + delta = actual.replace(tzinfo=timezone.utc) - expected - assert delta_sec <= 1 + 
assert delta.total_seconds() <= 1 def test_dt_to_http(self): assert ( @@ -381,13 +380,13 @@ def test_parse_query_string(self): result = uri.parse_query_string(query_string) assert result['a'] == decoded_url assert result['b'] == decoded_json - assert result['c'] == ['1', '2', '3'] + assert result['c'] == '1,2,3' assert result['d'] == 'test' - assert result['e'] == ['a', '&=,'] + assert result['e'] == 'a,,&=,' assert result['f'] == ['a', 'a=b'] assert result['é'] == 'a=b' - result = uri.parse_query_string(query_string, True) + result = uri.parse_query_string(query_string, True, True) assert result['a'] == decoded_url assert result['b'] == decoded_json assert result['c'] == ['1', '2', '3'] @@ -396,6 +395,15 @@ def test_parse_query_string(self): assert result['f'] == ['a', 'a=b'] assert result['é'] == 'a=b' + result = uri.parse_query_string(query_string, csv=True) + assert result['a'] == decoded_url + assert result['b'] == decoded_json + assert result['c'] == ['1', '2', '3'] + assert result['d'] == 'test' + assert result['e'] == ['a', '&=,'] + assert result['f'] == ['a', 'a=b'] + assert result['é'] == 'a=b' + @pytest.mark.parametrize( 'query_string,keep_blank,expected', [ diff --git a/tests/test_validators.py b/tests/test_validators.py index 51450c7e5..a7f5ed273 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -1,4 +1,4 @@ -import typing +import typing # NOQA: F401 try: import jsonschema as _jsonschema # NOQA diff --git a/tox.ini b/tox.ini index 98ba87223..1715258c5 100644 --- a/tox.ini +++ b/tox.ini @@ -172,6 +172,11 @@ commands = python "{toxinidir}/tools/clean.py" "{toxinidir}/falcon" [with-cython] deps = -r{toxinidir}/requirements/tests Cython + # NOTE(vytas): By using --no-build-isolation, we need to manage build + # deps ourselves, and on CPython 3.12, it seems even setuptools + # (our PEP 517 backend of choice) is not guaranteed to be there. + setuptools + wheel setenv = PIP_CONFIG_FILE={toxinidir}/pip.conf FALCON_DISABLE_CYTHON= @@ -216,6 +221,13 @@ deps = {[with-cython]deps} setenv = {[with-cython]setenv} commands = {[with-cython]commands} +[testenv:py312_cython] +basepython = python3.12 +install_command = {[with-cython]install_command} +deps = {[with-cython]deps} +setenv = {[with-cython]setenv} +commands = {[with-cython]commands} + # -------------------------------------------------------------------- # WSGI servers (Cythonized Falcon) # -------------------------------------------------------------------- @@ -261,8 +273,7 @@ commands = {[smoke-test]commands} # -------------------------------------------------------------------- [testenv:pep8] -# TODO(vytas): Unpin flake8 when the below plugins have caught up. -deps = flake8<6.0 +deps = flake8 flake8-quotes flake8-import-order commands = flake8 [] @@ -286,8 +297,7 @@ commands = flake8 \ [] [testenv:pep8-examples] -# TODO(vytas): Unpin flake8 when the below plugins have caught up. -deps = flake8<6.0 +deps = flake8 flake8-quotes flake8-import-order
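
Several of the test changes above replace naive `datetime.utcnow()` calls with timezone-aware equivalents (`datetime.now(timezone.utc)`, the `utcnow_naive()` helper added in `tests/test_cookies.py`, and `falcon.util.misc._utcnow`), presumably because `datetime.utcnow()` is deprecated as of Python 3.12. A minimal sketch of that pattern, assuming only the standard library (the one-hour offset below is purely illustrative):

```python
from datetime import datetime, timedelta, timezone


def utcnow_naive():
    # Same idea as the helper added to tests/test_cookies.py: take an
    # aware UTC "now" and strip tzinfo so the result can be compared
    # against naive datetimes (such as parsed cookie expiration times).
    return datetime.now(timezone.utc).replace(tzinfo=None)


# Usage mirroring the `cookie.expires < utcnow_naive()` assertions above;
# an expiration one hour in the past should always compare as expired.
expired = utcnow_naive() - timedelta(hours=1)
assert expired < utcnow_naive()
```
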